ansible-playbook [core 2.16.15rc1]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-fud
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.1 (main, Feb 21 2024, 14:18:26) [GCC 8.5.0 20210514 (Red Hat 8.5.0-21)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_set_mount.yml **************************************************
1 plays in /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml

PLAY [Mount snapshots of logical volumes across different volume groups] *******

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:2
Saturday 06 December 2025 17:27:46 -0500 (0:00:00.018) 0:00:00.018 *****
ok: [managed-node1]

TASK [Setup] *******************************************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:63
Saturday 06 December 2025 17:27:48 -0500 (0:00:01.194) 0:00:01.212 *****
included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml for managed-node1

TASK [Check if system is ostree] ***********************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:10
Saturday 06 December 2025 17:27:48 -0500 (0:00:00.024) 0:00:01.236 *****
ok: [managed-node1] => { "changed": false, "stat": { "exists": false } }

TASK [Set mount parent] ********************************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:15
Saturday 06 December 2025 17:27:48 -0500 (0:00:00.407) 0:00:01.644 *****
ok: [managed-node1] => { "ansible_facts": { "test_mnt_parent": "/mnt" }, "changed": false }

TASK [Run the storage role install base packages] ******************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:19
Saturday 06 December 2025 17:27:48 -0500 (0:00:00.042) 0:00:01.687 *****

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 06 December 2025 17:27:48 -0500 (0:00:00.054) 0:00:01.741 *****
included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 06 December 2025 17:27:48 -0500 (0:00:00.042) 0:00:01.784 *****
skipping: [managed-node1] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 06 December 2025 17:27:48 -0500 (0:00:00.053) 0:00:01.837 *****
skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" }
skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" }
ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" }

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 06 December 2025 17:27:48 -0500 (0:00:00.074) 0:00:01.912 *****
ok: [managed-node1] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 06 December 2025 17:27:49 -0500 (0:00:00.355) 0:00:02.267 *****
ok: [managed-node1] => { "ansible_facts": { "__storage_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 06 December 2025 17:27:49 -0500 (0:00:00.031) 0:00:02.299 *****
ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false }

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 06 December 2025 17:27:49 -0500 (0:00:00.027) 0:00:02.326 *****
ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false }
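[Editor's note: the pair of "Set platform/version specific variables" results above comes from an include_vars loop over increasingly specific platform files, guarded by an "is file" test so missing files are skipped; CentOS_8.yml is loaded twice because the major-version and full-version file names coincide on this host. A minimal sketch of that pattern, assuming the conventional system-roles vars layout (only the item names and the __vars_file condition appear in the log):

    - name: Set platform/version specific variables
      include_vars: "{{ __vars_file }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"                # RedHat.yml
        - "{{ ansible_facts['distribution'] }}.yml"             # CentOS.yml
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"  # CentOS_8.yml
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"        # CentOS_8.yml again here
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"  # assumed location of the vars files
      when: __vars_file is file
]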
TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 06 December 2025 17:27:49 -0500 (0:00:00.022) 0:00:02.349 *****
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node1

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 06 December 2025 17:27:49 -0500 (0:00:00.071) 0:00:02.421 *****
ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do
lsrpackages: kmod-kvdo libblockdev libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo xfsprogs

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 06 December 2025 17:27:52 -0500 (0:00:02.729) 0:00:05.151 *****
ok: [managed-node1] => { "storage_pools | d([])": [] }

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 06 December 2025 17:27:52 -0500 (0:00:00.037) 0:00:05.189 *****
ok: [managed-node1] => { "storage_volumes | d([])": [] }

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 06 December 2025 17:27:52 -0500 (0:00:00.035) 0:00:05.224 *****
ok: [managed-node1] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] }

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32
Saturday 06 December 2025 17:27:52 -0500 (0:00:00.625) 0:00:05.849 *****
included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node1

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Saturday 06 December 2025 17:27:52 -0500 (0:00:00.031) 0:00:05.881 *****
skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Saturday 06 December 2025 17:27:52 -0500 (0:00:00.031) 0:00:05.913 *****
skipping: [managed-node1] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Saturday 06 December 2025 17:27:52 -0500 (0:00:00.031) 0:00:05.945 *****
skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" }
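[Editor's note: the "Make sure blivet is available" step above installs the platform-specific blivet_package_list that set_vars.yml just loaded; the Jinja conditional item in the list resolves at install time. A minimal sketch of such a task, assuming a plain package-manager task (the variable name and its contents come from the log, the task form is an assumption):

    - name: Make sure blivet is available
      package:
        name: "{{ blivet_package_list }}"  # list shown in the CentOS_8.yml result above
        state: present
]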
TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38
Saturday 06 December 2025 17:27:52 -0500 (0:00:00.031) 0:00:05.977 *****
ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do
lsrpackages: kpartx

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52
Saturday 06 December 2025 17:27:55 -0500 (0:00:02.427) 0:00:08.404 *****
ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "auto-cpufreq.service": { "name": "auto-cpufreq.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", 
"status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:112.service": { "name": "lvm2-pvscan@8:112.service", "source": "systemd", 
"state": "stopped", "status": "active" }, "lvm2-pvscan@8:128.service": { "name": "lvm2-pvscan@8:128.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:144.service": { "name": "lvm2-pvscan@8:144.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:16.service": { "name": "lvm2-pvscan@8:16.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:32.service": { "name": "lvm2-pvscan@8:32.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:48.service": { "name": "lvm2-pvscan@8:48.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:64.service": { "name": "lvm2-pvscan@8:64.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:80.service": { "name": "lvm2-pvscan@8:80.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:96.service": { "name": "lvm2-pvscan@8:96.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", 
"status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "power-profiles-daemon.service": { "name": "power-profiles-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": 
"systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tlp.service": { "name": "tlp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Saturday 06 December 2025 17:27:56 -0500 (0:00:01.716) 0:00:10.121 ***** ok: [managed-node1] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Saturday 06 December 2025 17:27:57 -0500 (0:00:00.052) 0:00:10.173 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Saturday 06 December 2025 17:27:57 -0500 (0:00:00.016) 0:00:10.189 ***** ok: [managed-node1] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Saturday 06 December 2025 17:27:57 -0500 (0:00:00.542) 0:00:10.731 ***** skipping: [managed-node1] => { "changed": false, "false_condition": 
"blivet_output is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Saturday 06 December 2025 17:27:57 -0500 (0:00:00.052) 0:00:10.784 ***** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1765059700.0174959, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ab8070345774adad92683e9645714452be7be474", "ctime": 1765059644.069301, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 174064384, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1765059644.068301, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1343, "uid": 0, "version": "3298298534", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Saturday 06 December 2025 17:27:57 -0500 (0:00:00.329) 0:00:11.113 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "blivet_output is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 06 December 2025 17:27:57 -0500 (0:00:00.028) 0:00:11.142 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.016) 0:00:11.158 ***** ok: [managed-node1] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.030) 0:00:11.189 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.022) 0:00:11.211 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.024) 0:00:11.235 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No 
items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.035) 0:00:11.271 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.036) 0:00:11.308 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.038) 0:00:11.346 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.038) 0:00:11.384 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.037) 0:00:11.422 ***** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1765059512.040086, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1716968941.893, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1716968586.525, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1157759751", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.325) 0:00:11.747 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Saturday 06 December 2025 17:27:58 -0500 (0:00:00.017) 0:00:11.764 ***** ok: [managed-node1] TASK [Get unused disks] ******************************************************** task path: 
TASK [Get unused disks] ********************************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:25
Saturday 06 December 2025 17:27:59 -0500 (0:00:00.846) 0:00:12.610 *****
included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml for managed-node1

TASK [Check if system is ostree] ***********************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:5
Saturday 06 December 2025 17:27:59 -0500 (0:00:00.035) 0:00:12.646 *****
ok: [managed-node1] => { "changed": false, "stat": { "exists": false } }

TASK [Set flag to indicate system is ostree] ***********************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:10
Saturday 06 December 2025 17:27:59 -0500 (0:00:00.320) 0:00:12.966 *****
ok: [managed-node1] => { "ansible_facts": { "__snapshot_is_ostree": false }, "changed": false }

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14
Saturday 06 December 2025 17:27:59 -0500 (0:00:00.027) 0:00:12.993 *****
ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do
lsrpackages: util-linux

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:23
Saturday 06 December 2025 17:28:02 -0500 (0:00:02.449) 0:00:15.443 *****
ok: [managed-node1] => { "changed": false, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ], "info": [ "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdj\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdk\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdl\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"", "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"", "filename [xvda1] is a partition", "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions" ] }
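[Editor's note: the "info" lines above are lsblk key="value" pairs with sizes in bytes; the test helper walks them and rejects anything that is not a bare, empty disk (here /dev/xvda is excluded because it has a partition). A command producing exactly those lines would look roughly like the following sketch, wrapped as a task for illustration (the exact flags used by the helper are an assumption):

    - name: List candidate block devices, one key="value" line per device
      command: lsblk -b -p -P -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
      register: __lsblk_out  # hypothetical register name
      changed_when: false    # read-only probe

-b reports sizes in bytes, -p prints full /dev paths, and -P emits the pairs format seen in the log.]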
17:28:02 -0500 (0:00:00.599) 0:00:16.042 ***** ok: [managed-node1] => { "ansible_facts": { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }, "changed": false } TASK [Print unused disks] ****************************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:36 Saturday 06 December 2025 17:28:02 -0500 (0:00:00.028) 0:00:16.071 ***** ok: [managed-node1] => { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] } TASK [Print info from find_unused_disk] **************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:44 Saturday 06 December 2025 17:28:02 -0500 (0:00:00.026) 0:00:16.097 ***** skipping: [managed-node1] => { "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)" } TASK [Show disk information] *************************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:49 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.051) 0:00:16.148 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)", "skip_reason": "Conditional result was False" } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:58 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.042) 0:00:16.191 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)", "skip_reason": "Conditional result was False" } TASK [Create LVM logical volumes under volume groups] ************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:31 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.041) 0:00:16.232 ***** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.082) 0:00:16.314 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.055) 0:00:16.370 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.073) 0:00:16.443 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": 
false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.088) 0:00:16.532 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.035) 0:00:16.567 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.051) 0:00:16.619 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.034) 0:00:16.653 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.036) 0:00:16.690 ***** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: 
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Saturday 06 December 2025 17:28:03 -0500 (0:00:00.089) 0:00:16.779 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo xfsprogs TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 06 December 2025 17:28:06 -0500 (0:00:02.580) 0:00:19.360 ***** ok: [managed-node1] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc" ], "name": "test_vg1", "volumes": [ { "name": "lv1", "size": "15%" }, { "name": "lv2", "size": "50%" } ] }, { "disks": [ "sdd", "sde", "sdf" ], "name": "test_vg2", "volumes": [ { "name": "lv3", "size": "10%" }, { "name": "lv4", "size": "20%" } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "name": "test_vg3", "volumes": [ { "name": "lv5", "size": "30%" }, { "name": "lv6", "size": "25%" }, { "name": "lv7", "size": "10%" }, { "name": "lv8", "size": "10%" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Saturday 06 December 2025 17:28:06 -0500 (0:00:00.077) 0:00:19.437 ***** ok: [managed-node1] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Saturday 06 December 2025 17:28:06 -0500 (0:00:00.123) 0:00:19.561 ***** ok: [managed-node1] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 06 December 2025 17:28:10 -0500 (0:00:04.081) 0:00:23.642 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Saturday 06 December 2025 17:28:10 -0500 (0:00:00.040) 0:00:23.682 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Saturday 06 December 2025 17:28:10 -0500 (0:00:00.037) 0:00:23.720 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Saturday 06 December 2025 17:28:10 -0500 (0:00:00.038) 0:00:23.758 ***** skipping: [managed-node1] => { "changed": false, 
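
For orientation, a reconstruction of the storage_pools input that would yield the layout printed by "Show storage_pools" above. This is a sketch, not the test playbook itself: the real test derives the disk names from the unused-disk discovery at run time.

    - hosts: all
      vars:
        storage_pools:
          - name: test_vg1
            disks: [sda, sdb, sdc]
            volumes:
              - {name: lv1, size: "15%"}
              - {name: lv2, size: "50%"}
          - name: test_vg2
            disks: [sdd, sde, sdf]
            volumes:
              - {name: lv3, size: "10%"}
              - {name: lv4, size: "20%"}
          - name: test_vg3
            disks: [sdg, sdh, sdi, sdj]
            volumes:
              - {name: lv5, size: "30%"}
              - {name: lv6, size: "25%"}
              - {name: lv7, size: "10%"}
              - {name: lv8, size: "10%"}
      roles:
        - fedora.linux_system_roles.storage

The percent sizes are interpreted relative to each volume group's capacity.
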
"skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Saturday 06 December 2025 17:28:10 -0500 (0:00:00.036) 0:00:23.794 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx lvm2 TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Saturday 06 December 2025 17:28:13 -0500 (0:00:02.461) 0:00:26.256 ***** ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "auto-cpufreq.service": { "name": "auto-cpufreq.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": 
"getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:112.service": { "name": "lvm2-pvscan@8:112.service", "source": "systemd", "state": "stopped", "status": "active" }, 
"lvm2-pvscan@8:128.service": { "name": "lvm2-pvscan@8:128.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:144.service": { "name": "lvm2-pvscan@8:144.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:16.service": { "name": "lvm2-pvscan@8:16.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:32.service": { "name": "lvm2-pvscan@8:32.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:48.service": { "name": "lvm2-pvscan@8:48.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:64.service": { "name": "lvm2-pvscan@8:64.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:80.service": { "name": "lvm2-pvscan@8:80.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:96.service": { "name": "lvm2-pvscan@8:96.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { 
"name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "power-profiles-daemon.service": { "name": "power-profiles-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": 
"rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": 
"systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tlp.service": { "name": "tlp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Saturday 06 December 2025 17:28:14 -0500 (0:00:01.680) 0:00:27.936 ***** ok: [managed-node1] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Saturday 06 December 2025 17:28:14 -0500 (0:00:00.061) 0:00:27.998 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Saturday 06 December 2025 17:28:14 -0500 (0:00:00.020) 0:00:28.018 ***** changed: [managed-node1] => { "actions": [ { "action": "create format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { 
"action": "create format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdk", "/dev/sdl", "/dev/xvda1", "/dev/mapper/test_vg1-lv1", "/dev/mapper/test_vg1-lv2", "/dev/mapper/test_vg2-lv3", "/dev/mapper/test_vg2-lv4", "/dev/mapper/test_vg3-lv5", "/dev/mapper/test_vg3-lv6", "/dev/mapper/test_vg3-lv7", "/dev/mapper/test_vg3-lv8" ], "mounts": [], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_kernel_device": "/dev/dm-7", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": 
"lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "15%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_kernel_device": "/dev/dm-6", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, 
"encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_kernel_device": "/dev/dm-3", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "_raw_kernel_device": "/dev/dm-3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "30%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_kernel_device": "/dev/dm-2", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "_raw_kernel_device": "/dev/dm-2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", 
"_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Saturday 06 December 2025 17:28:22 -0500 (0:00:08.042) 0:00:36.061 ***** ok: [managed-node1] => { "changed": false, "cmd": [ "udevadm", "trigger", "--subsystem-match=block" ], "delta": "0:00:00.016659", "end": "2025-12-06 17:28:23.317475", "rc": 0, "start": "2025-12-06 17:28:23.300816" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Saturday 06 December 2025 17:28:23 -0500 (0:00:00.609) 0:00:36.671 ***** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1765059700.0174959, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ab8070345774adad92683e9645714452be7be474", "ctime": 1765059644.069301, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 174064384, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1765059644.068301, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 
1343, "uid": 0, "version": "3298298534", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Saturday 06 December 2025 17:28:23 -0500 (0:00:00.366) 0:00:37.038 ***** ok: [managed-node1] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.538) 0:00:37.576 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.025) 0:00:37.602 ***** ok: [managed-node1] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": 
[], "failed": false, "leaves": [ "/dev/sdk", "/dev/sdl", "/dev/xvda1", "/dev/mapper/test_vg1-lv1", "/dev/mapper/test_vg1-lv2", "/dev/mapper/test_vg2-lv3", "/dev/mapper/test_vg2-lv4", "/dev/mapper/test_vg3-lv5", "/dev/mapper/test_vg3-lv6", "/dev/mapper/test_vg3-lv7", "/dev/mapper/test_vg3-lv8" ], "mounts": [], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_kernel_device": "/dev/dm-7", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "15%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_kernel_device": "/dev/dm-6", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, 
"raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_kernel_device": "/dev/dm-3", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "_raw_kernel_device": "/dev/dm-3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 
0, "mount_point": "", "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "30%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_kernel_device": "/dev/dm-2", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "_raw_kernel_device": "/dev/dm-2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, 
"size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.065) 0:00:37.668 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_kernel_device": "/dev/dm-7", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "15%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_kernel_device": "/dev/dm-6", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, 
"grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_kernel_device": "/dev/dm-3", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "_raw_kernel_device": "/dev/dm-3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": 
"xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "30%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_kernel_device": "/dev/dm-2", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "_raw_kernel_device": "/dev/dm-2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, 
"raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.067) 0:00:37.735 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.036) 0:00:37.772 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.058) 0:00:37.830 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.058) 0:00:37.889 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.063) 0:00:37.952 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.063) 0:00:38.016 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Saturday 06 December 2025 17:28:24 -0500 (0:00:00.051) 0:00:38.067 ***** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1765059512.040086, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1716968941.893, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1716968586.525, "nlink": 1, "path": 
"/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1157759751", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Saturday 06 December 2025 17:28:25 -0500 (0:00:00.338) 0:00:38.406 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Saturday 06 December 2025 17:28:25 -0500 (0:00:00.018) 0:00:38.424 ***** ok: [managed-node1] TASK [Run the snapshot role to create a snapshot set of LVs] ******************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:66 Saturday 06 December 2025 17:28:26 -0500 (0:00:00.896) 0:00:39.320 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:28:26 -0500 (0:00:00.055) 0:00:39.376 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:28:26 -0500 (0:00:00.034) 0:00:39.410 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:28:26 -0500 (0:00:00.042) 0:00:39.452 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:28:26 -0500 (0:00:00.024) 0:00:39.476 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:28:26 -0500 (0:00:00.023) 0:00:39.499 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", 
"item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:28:26 -0500 (0:00:00.049) 0:00:39.549 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:28:26 -0500 (0:00:00.039) 0:00:39.588 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:28:28 -0500 (0:00:02.528) 0:00:42.117 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:28:29 -0500 (0:00:00.541) 0:00:42.658 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:28:29 -0500 (0:00:00.029) 0:00:42.688 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:28:29 -0500 (0:00:00.018) 0:00:42.706 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:28:29 -0500 (0:00:00.065) 0:00:42.772 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:28:29 -0500 (0:00:00.019) 0:00:42.792 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:28:29 -0500 (0:00:00.021) 0:00:42.814 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module snapshot] ******* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:28:29 -0500 (0:00:00.024) 0:00:42.838 ***** changed: [managed-node1] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:28:31 -0500 (0:00:02.111) 0:00:44.949 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 
Saturday 06 December 2025 17:28:31 -0500 (0:00:00.033) 0:00:44.982 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:28:31 -0500 (0:00:00.035) 0:00:45.018 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:28:31 -0500 (0:00:00.030) 0:00:45.049 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Verify the set of snapshots for the LVs] ********************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:73 Saturday 06 December 2025 17:28:31 -0500 (0:00:00.041) 0:00:45.090 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:28:32 -0500 (0:00:00.067) 0:00:45.158 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:28:32 -0500 (0:00:00.040) 0:00:45.198 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:28:32 -0500 (0:00:00.050) 0:00:45.249 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:28:32 -0500 (0:00:00.031) 0:00:45.280 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:28:32 -0500 (0:00:00.026) 0:00:45.307 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was 
False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:28:32 -0500 (0:00:00.051) 0:00:45.358 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:28:32 -0500 (0:00:00.039) 0:00:45.398 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:28:34 -0500 (0:00:02.496) 0:00:47.894 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:28:35 -0500 (0:00:00.316) 0:00:48.211 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:28:35 -0500 (0:00:00.026) 0:00:48.237 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:28:35 -0500 (0:00:00.018) 0:00:48.256 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:28:35 -0500 (0:00:00.053) 0:00:48.309 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:28:35 -0500 (0:00:00.018) 0:00:48.327 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:28:35 -0500 (0:00:00.019) 0:00:48.347 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module check] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:28:35 -0500 (0:00:00.023) 0:00:48.370 ***** ok: [managed-node1] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.857) 0:00:49.228 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 Saturday 
06 December 2025 17:28:36 -0500 (0:00:00.040) 0:00:49.268 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.032) 0:00:49.301 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.031) 0:00:49.332 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Mount the set] *********************************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:81 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.040) 0:00:49.372 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.068) 0:00:49.441 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.030) 0:00:49.472 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.042) 0:00:49.514 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.025) 0:00:49.539 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.034) 0:00:49.574 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } 
skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.054) 0:00:49.628 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:28:36 -0500 (0:00:00.054) 0:00:49.683 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:28:39 -0500 (0:00:02.468) 0:00:52.151 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:28:39 -0500 (0:00:00.317) 0:00:52.468 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:28:39 -0500 (0:00:00.026) 0:00:52.495 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:28:39 -0500 (0:00:00.019) 0:00:52.514 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:28:39 -0500 (0:00:00.108) 0:00:52.623 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:28:39 -0500 (0:00:00.020) 0:00:52.643 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:28:39 -0500 (0:00:00.030) 0:00:52.674 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:28:39 -0500 (0:00:00.027) 0:00:52.702 ***** changed: [managed-node1] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.696) 0:00:53.398 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 
Saturday 06 December 2025 17:28:40 -0500 (0:00:00.031) 0:00:53.430 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.031) 0:00:53.461 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.026) 0:00:53.488 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Assert changes for mount] ************************************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:89 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.037) 0:00:53.525 ***** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Verify the mount is done] ************************************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:93 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.022) 0:00:53.548 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.071) 0:00:53.620 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.032) 0:00:53.653 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.054) 0:00:53.707 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.029) 0:00:53.737 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: 
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.022) 0:00:53.759 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.050) 0:00:53.809 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:28:40 -0500 (0:00:00.041) 0:00:53.851 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:28:43 -0500 (0:00:02.479) 0:00:56.331 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:28:43 -0500 (0:00:00.352) 0:00:56.683 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:28:43 -0500 (0:00:00.039) 0:00:56.723 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:28:43 -0500 (0:00:00.031) 0:00:56.755 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:28:43 -0500 (0:00:00.089) 0:00:56.844 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:28:43 -0500 (0:00:00.044) 0:00:56.889 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:28:43 -0500 (0:00:00.029) 0:00:56.919 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:28:43 -0500 (0:00:00.038) 0:00:56.958 ***** ok: [managed-node1] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.641) 0:00:57.599 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 Saturday 
06 December 2025 17:28:44 -0500 (0:00:00.036) 0:00:57.636 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.030) 0:00:57.667 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.028) 0:00:57.695 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Mount the set again to check idempotence] ******************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:101 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.034) 0:00:57.730 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.081) 0:00:57.811 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.034) 0:00:57.845 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.059) 0:00:57.905 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.026) 0:00:57.931 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.024) 0:00:57.956 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } 
skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.088) 0:00:58.044 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:28:44 -0500 (0:00:00.041) 0:00:58.085 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:28:47 -0500 (0:00:02.470) 0:01:00.556 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:28:47 -0500 (0:00:00.319) 0:01:00.875 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:28:47 -0500 (0:00:00.028) 0:01:00.904 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:28:47 -0500 (0:00:00.019) 0:01:00.924 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:28:47 -0500 (0:00:00.050) 0:01:00.974 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:28:47 -0500 (0:00:00.019) 0:01:00.993 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:28:47 -0500 (0:00:00.020) 0:01:01.014 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:28:47 -0500 (0:00:00.024) 0:01:01.038 ***** ok: [managed-node1] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.587) 0:01:01.626 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 Saturday 
06 December 2025 17:28:48 -0500 (0:00:00.032) 0:01:01.659 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.032) 0:01:01.691 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.026) 0:01:01.717 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Assert no changes for mount] ********************************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:109 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.034) 0:01:01.752 ***** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Umount the set] ********************************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:113 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.027) 0:01:01.780 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.138) 0:01:01.919 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.050) 0:01:01.970 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.065) 0:01:02.036 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.038) 0:01:02.074 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: 
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:28:48 -0500 (0:00:00.036) 0:01:02.110 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:28:49 -0500 (0:00:00.096) 0:01:02.207 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:28:49 -0500 (0:00:00.045) 0:01:02.253 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:28:51 -0500 (0:00:02.512) 0:01:04.765 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:28:51 -0500 (0:00:00.329) 0:01:05.094 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:28:51 -0500 (0:00:00.026) 0:01:05.121 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:28:51 -0500 (0:00:00.019) 0:01:05.141 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:28:52 -0500 (0:00:00.055) 0:01:05.196 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:28:52 -0500 (0:00:00.020) 0:01:05.217 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:28:52 -0500 (0:00:00.020) 0:01:05.237 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:28:52 -0500 (0:00:00.024) 0:01:05.262 ***** changed: [managed-node1] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:28:52 -0500 (0:00:00.651) 0:01:05.914 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 
Saturday 06 December 2025 17:28:52 -0500 (0:00:00.031) 0:01:05.945 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:28:52 -0500 (0:00:00.030) 0:01:05.975 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:28:52 -0500 (0:00:00.025) 0:01:06.001 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Assert changes for umount] *********************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:120 Saturday 06 December 2025 17:28:52 -0500 (0:00:00.034) 0:01:06.036 ***** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Verify the umount is done] *********************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:124 Saturday 06 December 2025 17:28:52 -0500 (0:00:00.022) 0:01:06.059 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:28:53 -0500 (0:00:00.126) 0:01:06.186 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:28:53 -0500 (0:00:00.033) 0:01:06.219 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:28:53 -0500 (0:00:00.042) 0:01:06.261 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:28:53 -0500 (0:00:00.024) 0:01:06.286 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: 
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:28:53 -0500 (0:00:00.023) 0:01:06.309 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:28:53 -0500 (0:00:00.066) 0:01:06.376 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:28:53 -0500 (0:00:00.037) 0:01:06.413 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:28:55 -0500 (0:00:02.461) 0:01:08.875 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.358) 0:01:09.233 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.031) 0:01:09.264 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.023) 0:01:09.288 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.068) 0:01:09.357 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.029) 0:01:09.387 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.032) 0:01:09.419 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.062) 0:01:09.482 ***** ok: [managed-node1] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.443) 0:01:09.925 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 Saturday 
06 December 2025 17:28:56 -0500 (0:00:00.031) 0:01:09.957 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.032) 0:01:09.989 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.026) 0:01:10.016 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Umount the set again to check idempotence] ******************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:132 Saturday 06 December 2025 17:28:56 -0500 (0:00:00.034) 0:01:10.050 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:28:57 -0500 (0:00:00.107) 0:01:10.158 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:28:57 -0500 (0:00:00.034) 0:01:10.192 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:28:57 -0500 (0:00:00.043) 0:01:10.236 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:28:57 -0500 (0:00:00.026) 0:01:10.262 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:28:57 -0500 (0:00:00.026) 0:01:10.288 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } 
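Two follow-up passes bracket that umount: "Verify the umount is done" (tests_set_mount.yml:124) just above, and the "Umount the set again to check idempotence" run starting here (tests_set_mount.yml:132). Both re-drive the role with the same action and both must report changed=false. A sketch under those assumptions (the verify-only flag in particular is assumed; this log only shows the unchanged results):
    - name: Verify the umount is done
      include_role:
        name: fedora.linux_system_roles.snapshot
      vars:
        snapshot_lvm_action: umount
        snapshot_lvm_verify_only: true   # assumed flag; the log shows only changed=false
    - name: Umount the set again to check idempotence
      include_role:
        name: fedora.linux_system_roles.snapshot
      vars:
        snapshot_lvm_action: umount      # identical re-run must report no change
    - name: Assert no changes for umount
      assert:
        that: not snapshot_cmd["changed"]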
skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:28:57 -0500 (0:00:00.052) 0:01:10.341 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:28:57 -0500 (0:00:00.040) 0:01:10.382 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:28:59 -0500 (0:00:02.485) 0:01:12.867 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:29:00 -0500 (0:00:00.363) 0:01:13.231 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:29:00 -0500 (0:00:00.040) 0:01:13.272 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:29:00 -0500 (0:00:00.036) 0:01:13.308 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:29:00 -0500 (0:00:00.074) 0:01:13.382 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:29:00 -0500 (0:00:00.024) 0:01:13.407 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:29:00 -0500 (0:00:00.022) 0:01:13.429 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:29:00 -0500 (0:00:00.028) 0:01:13.457 ***** ok: [managed-node1] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:29:00 -0500 (0:00:00.651) 0:01:14.108 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 Saturday 
06 December 2025 17:29:01 -0500 (0:00:00.072) 0:01:14.180 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.033) 0:01:14.213 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.027) 0:01:14.241 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Assert no changes for umount] ******************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:139 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.036) 0:01:14.278 ***** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Run the snapshot role remove the set] ************************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:143 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.024) 0:01:14.303 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.116) 0:01:14.420 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.034) 0:01:14.454 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.044) 0:01:14.498 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.024) 0:01:14.523 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: 
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.024) 0:01:14.548 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.059) 0:01:14.607 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:29:01 -0500 (0:00:00.063) 0:01:14.671 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:29:04 -0500 (0:00:02.551) 0:01:17.222 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:29:04 -0500 (0:00:00.347) 0:01:17.569 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:29:04 -0500 (0:00:00.029) 0:01:17.598 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:29:04 -0500 (0:00:00.021) 0:01:17.619 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:29:04 -0500 (0:00:00.053) 0:01:17.673 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:29:04 -0500 (0:00:00.020) 0:01:17.693 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:29:04 -0500 (0:00:00.021) 0:01:17.714 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module remove] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:29:04 -0500 (0:00:00.025) 0:01:17.740 ***** changed: [managed-node1] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:29:06 -0500 (0:00:01.441) 0:01:19.182 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 
Saturday 06 December 2025 17:29:06 -0500 (0:00:00.045) 0:01:19.227 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.039) 0:01:19.267 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.027) 0:01:19.294 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Run the snapshot role to verify the set is removed] ********************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:150 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.035) 0:01:19.329 ***** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.128) 0:01:19.458 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.032) 0:01:19.490 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.042) 0:01:19.532 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.024) 0:01:19.557 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.023) 0:01:19.580 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was 
False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.058) 0:01:19.639 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Saturday 06 December 2025 17:29:06 -0500 (0:00:00.042) 0:01:19.682 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: lvm2 util-linux TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Saturday 06 December 2025 17:29:09 -0500 (0:00:02.547) 0:01:22.229 ***** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory: b'snapm': b'snapm' ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Saturday 06 December 2025 17:29:09 -0500 (0:00:00.347) 0:01:22.577 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Saturday 06 December 2025 17:29:09 -0500 (0:00:00.040) 0:01:22.617 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Saturday 06 December 2025 17:29:09 -0500 (0:00:00.029) 0:01:22.646 ***** ok: [managed-node1] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Saturday 06 December 2025 17:29:09 -0500 (0:00:00.073) 0:01:22.720 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_snapm_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Saturday 06 December 2025 17:29:09 -0500 (0:00:00.021) 0:01:22.741 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:57 Saturday 06 December 2025 17:29:09 -0500 (0:00:00.029) 0:01:22.770 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__snapshot_needs_bootable_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Run snapshot module remove] ********* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:71 Saturday 06 December 2025 17:29:09 -0500 (0:00:00.040) 0:01:22.811 ***** ok: [managed-node1] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:98 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.632) 0:01:23.444 ***** ok: [managed-node1] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:104 Saturday 
06 December 2025 17:29:10 -0500 (0:00:00.032) 0:01:23.477 ***** ok: [managed-node1] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:109 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.031) 0:01:23.508 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "snapshot_cmd is defined and snapshot_lvm_action == \"list\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:114 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.026) 0:01:23.534 ***** skipping: [managed-node1] => { "false_condition": "snapshot_cmd is defined and snapshot_cmd[\"return_code\"] != 0" } TASK [Cleanup] ***************************************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_set_mount.yml:159 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.035) 0:01:23.570 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml for managed-node1 TASK [Remove storage volumes] ************************************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:7 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.074) 0:01:23.645 ***** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.033) 0:01:23.678 ***** included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.031) 0:01:23.710 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.042) 0:01:23.753 ***** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", 
"stratis-cli", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.051) 0:01:23.804 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.023) 0:01:23.828 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.024) 0:01:23.852 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.020) 0:01:23.873 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.019) 0:01:23.893 ***** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Saturday 06 December 2025 17:29:10 -0500 (0:00:00.050) 0:01:23.944 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo xfsprogs TASK 
TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 06 December 2025 17:29:13 -0500 (0:00:02.470) 0:01:26.414 *****
ok: [managed-node1] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc" ], "name": "test_vg1", "state": "absent", "volumes": [ { "name": "lv1", "state": "absent" }, { "name": "lv2", "state": "absent" } ] }, { "disks": [ "sdd", "sde", "sdf" ], "name": "test_vg2", "state": "absent", "volumes": [ { "name": "lv3", "state": "absent" }, { "name": "lv4", "state": "absent" } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "name": "test_vg3", "state": "absent", "volumes": [ { "name": "lv5", "state": "absent" }, { "name": "lv6", "state": "absent" }, { "name": "lv7", "state": "absent" }, { "name": "lv8", "state": "absent" } ] } ] }
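The storage_pools value shown above is what drives the cleanup: every volume group and logical volume created by the test is marked absent. A minimal sketch of the equivalent role invocation, assuming the cleanup task file passes this variable straight to the storage role; only the pool data comes from the log, the surrounding task is illustrative:

    # Illustrative sketch of the cleanup invocation; the actual tasks/cleanup.yml is not shown here.
    - name: Remove storage volumes
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: test_vg1
            disks: [sda, sdb, sdc]
            state: absent
            volumes:
              - { name: lv1, state: absent }
              - { name: lv2, state: absent }
          - name: test_vg2
            disks: [sdd, sde, sdf]
            state: absent
            volumes:
              - { name: lv3, state: absent }
              - { name: lv4, state: absent }
          - name: test_vg3
            disks: [sdg, sdh, sdi, sdj]
            state: absent
            volumes:
              - { name: lv5, state: absent }
              - { name: lv6, state: absent }
              - { name: lv7, state: absent }
              - { name: lv8, state: absent }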
false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Saturday 06 December 2025 17:29:21 -0500 (0:00:02.485) 0:01:34.944 ***** ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "auto-cpufreq.service": { "name": "auto-cpufreq.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": 
"dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { 
"name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:112.service": { "name": "lvm2-pvscan@8:112.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:128.service": { "name": "lvm2-pvscan@8:128.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:144.service": { "name": "lvm2-pvscan@8:144.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:16.service": { "name": "lvm2-pvscan@8:16.service", "source": "systemd", "state": "stopped", "status": 
"active" }, "lvm2-pvscan@8:32.service": { "name": "lvm2-pvscan@8:32.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:48.service": { "name": "lvm2-pvscan@8:48.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:64.service": { "name": "lvm2-pvscan@8:64.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:80.service": { "name": "lvm2-pvscan@8:80.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@8:96.service": { "name": "lvm2-pvscan@8:96.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", 
"source": "systemd", "state": "stopped", "status": "enabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "power-profiles-daemon.service": { "name": "power-profiles-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", 
"source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", 
"status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": 
"timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tlp.service": { "name": "tlp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Saturday 06 December 2025 17:29:23 -0500 (0:00:01.683) 0:01:36.627 ***** ok: [managed-node1] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Saturday 06 December 2025 17:29:23 -0500 (0:00:00.084) 0:01:36.712 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Saturday 06 December 2025 17:29:23 -0500 (0:00:00.029) 0:01:36.742 ***** changed: [managed-node1] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": 
"destroy format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdk", "/dev/sdl", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/sdj", "/dev/xvda1" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1451229184, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": 
false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 4827643904, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 968884224, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1933574144, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", 
"sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3862953984, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3217031168, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": 
null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Saturday 06 December 2025 17:29:33 -0500 (0:00:09.455) 0:01:46.197 ***** ok: [managed-node1] => { "changed": false, "cmd": [ "udevadm", "trigger", "--subsystem-match=block" ], "delta": "0:00:00.016505", "end": "2025-12-06 17:29:33.354324", "rc": 0, "start": "2025-12-06 17:29:33.337819" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Saturday 06 December 2025 17:29:33 -0500 (0:00:00.489) 0:01:46.687 ***** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1765059700.0174959, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ab8070345774adad92683e9645714452be7be474", "ctime": 1765059644.069301, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 174064384, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1765059644.068301, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1343, "uid": 0, "version": "3298298534", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Saturday 06 December 2025 17:29:33 -0500 (0:00:00.346) 0:01:47.034 ***** ok: [managed-node1] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.349) 0:01:47.384 ***** skipping: [managed-node1] => { "changed": 
false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.020) 0:01:47.404 ***** ok: [managed-node1] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdk", "/dev/sdl", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/sdj", "/dev/xvda1" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { 
"_device": "/dev/mapper/test_vg1-lv1", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1451229184, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 4827643904, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, 
"raid_stripe_size": null, "size": 968884224, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1933574144, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3862953984, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, 
"mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3217031168, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.042) 0:01:47.446 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", 
"cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1451229184, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 4827643904, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 968884224, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": 
null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1933574144, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3862953984, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], 
"raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3217031168, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.036) 0:01:47.483 ***** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.025) 0:01:47.509 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.040) 0:01:47.549 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.041) 0:01:47.590 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.041) 0:01:47.632 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.045) 0:01:47.678 ***** skipping: [managed-node1] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.041) 0:01:47.719 ***** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1765059512.040086, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1716968941.893, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1716968586.525, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1157759751", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.344) 0:01:48.064 ***** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Saturday 06 December 2025 17:29:34 -0500 (0:00:00.030) 0:01:48.095 ***** ok: [managed-node1] TASK [Save unused_disk_return before verify] *********************************** task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:30 Saturday 06 December 2025 17:29:35 -0500 (0:00:00.858) 0:01:48.953 ***** ok: [managed-node1] => { "ansible_facts": { "unused_disks_before": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }, "changed": false } TASK [Verify that pools/volumes used in test are removed] ********************** task path: 
TASK [Check if system is ostree] ***********************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:5
Saturday 06 December 2025 17:29:35 -0500 (0:00:00.034) 0:01:49.011 *****
skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [Set flag to indicate system is ostree] ***********************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:10
Saturday 06 December 2025 17:29:35 -0500 (0:00:00.022) 0:01:49.034 *****
skipping: [managed-node1] => { "changed": false, "false_condition": "not __snapshot_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14
Saturday 06 December 2025 17:29:35 -0500 (0:00:00.023) 0:01:49.057 *****
ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] }

MSG:

Nothing to do
lsrpackages: util-linux

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:23
Saturday 06 December 2025 17:29:38 -0500 (0:00:02.458) 0:01:51.516 *****
ok: [managed-node1] => {
    "changed": false,
    "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdj\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdk\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdl\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}
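
Note: the "info" lines above are lsblk records as parsed by the tests' find_unused_disk module: whole disks with no filesystem and no partitions qualify as unused, which is why /dev/xvda (partitioned) and /dev/xvda1 (a partition) are rejected while sda through sdj pass. A roughly equivalent ad-hoc scan, assuming a standard util-linux lsblk (an approximation, not the module's actual implementation):

    - name: List block devices in the NAME=... form seen in the log
      ansible.builtin.command:
        # --pairs prints KEY="value" records; --bytes gives exact byte sizes,
        # matching SIZE="3221225472" above; LOG-SEC is the logical sector size.
        cmd: lsblk --pairs --bytes -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
      register: lsblk_out
      changed_when: false
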
TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:31
Saturday 06 December 2025 17:29:38 -0500 (0:00:00.025) 0:01:51.900 *****
ok: [managed-node1] => { "ansible_facts": { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }, "changed": false }

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:36
Saturday 06 December 2025 17:29:38 -0500 (0:00:00.025) 0:01:51.926 *****
ok: [managed-node1] => { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }

TASK [Print info from find_unused_disk] ****************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:44
Saturday 06 December 2025 17:29:38 -0500 (0:00:00.037) 0:01:51.963 *****
skipping: [managed-node1] => { "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)" }

TASK [Show disk information] ***************************************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:49
Saturday 06 December 2025 17:29:38 -0500 (0:00:00.041) 0:01:52.004 *****
skipping: [managed-node1] => { "changed": false, "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)", "skip_reason": "Conditional result was False" }

TASK [Exit playbook when there's not enough unused disks in the system] ********
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:58
Saturday 06 December 2025 17:29:38 -0500 (0:00:00.040) 0:01:52.045 *****
skipping: [managed-node1] => { "changed": false, "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)", "skip_reason": "Conditional result was False" }

TASK [Debug why list of unused disks has changed] ******************************
task path: /tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:40
Saturday 06 December 2025 17:29:38 -0500 (0:00:00.039) 0:01:52.085 *****
skipping: [managed-node1] => { "changed": false, "false_condition": "unused_disks_before != unused_disks_return.disks", "skip_reason": "Conditional result was False" }
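
Note: the three skipped guard tasks above share one condition, unused_disks | d([]) | length < disks_needed | d(1): they run only when the scan found fewer disks than the test needs, and with ten free disks the play simply proceeds. A sketch of such a guard (task wording hypothetical; the condition is taken verbatim from the log):

    - name: Abort early when the host cannot supply enough unused disks
      ansible.builtin.fail:
        msg: >-
          Needed {{ disks_needed | d(1) }} unused disk(s), found
          {{ unused_disks | d([]) | length }}.
      # Same expression the guard tasks above were skipped on.
      when: unused_disks | d([]) | length < disks_needed | d(1)
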
PLAY RECAP *********************************************************************
managed-node1              : ok=178  changed=6  unreachable=0  failed=0  skipped=151  rescued=0  ignored=10

SYSTEM ROLES ERRORS BEGIN v1
[]
SYSTEM ROLES ERRORS END v1

TASKS RECAP ********************************************************************
Saturday 06 December 2025 17:29:38 -0500 (0:00:00.011) 0:01:52.097 *****
===============================================================================
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 9.46s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 8.04s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
fedora.linux_system_roles.storage : Get required packages --------------- 5.67s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
fedora.linux_system_roles.storage : Get required packages --------------- 4.08s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
fedora.linux_system_roles.storage : Make sure blivet is available ------- 2.73s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
fedora.linux_system_roles.storage : Make sure blivet is available ------- 2.58s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.55s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.55s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.53s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.51s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.50s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.storage : Make sure required packages are installed --- 2.49s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.49s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.48s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.storage : Make sure blivet is available ------- 2.47s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.47s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.47s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.storage : Make sure required packages are installed --- 2.46s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.46s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
Ensure test packages ---------------------------------------------------- 2.46s
/tmp/collections-fud/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14