ansible-playbook [core 2.17.14]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-wfn
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.12 (main, Jan 16 2026, 00:00:00) [GCC 14.3.1 20251022 (Red Hat 14.3.1-4)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_basic.yml ******************************************************
2 plays in /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:5
Saturday 07 March 2026 11:43:14 -0500 (0:00:00.028) 0:00:00.028 ********
ok: [managed-node2] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-WxE/tests/vars/vault-variables.yml" ], "changed": false }

PLAY [Ensure that the role runs with default parameters] ***********************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:9
Saturday 07 March 2026 11:43:14 -0500 (0:00:00.019) 0:00:00.048 ********
[WARNING]: Platform linux on host managed-node2 is using the discovered Python interpreter at /usr/bin/python3.12, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html for more information.
ok: [managed-node2]

TASK [Create tmpdir for testing] ***********************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:143
Saturday 07 March 2026 11:43:15 -0500 (0:00:01.361) 0:00:01.410 ********
changed: [managed-node2] => { "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/tmp/lsr_od4netlk_podman", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 }

TASK [Change tmpdir permissions] ***********************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:150
Saturday 07 March 2026 11:43:16 -0500 (0:00:00.497) 0:00:01.907 ********
changed: [managed-node2] => { "changed": true, "gid": 0, "group": "root", "mode": "0777", "owner": "root", "path": "/tmp/lsr_od4netlk_podman", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 }

TASK [Enable podman copr] ******************************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:161
Saturday 07 March 2026 11:43:16 -0500 (0:00:00.486) 0:00:02.394 ********
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" }

TASK [Install podman from updates-testing] *************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:166
Saturday 07 March 2026 11:43:16 -0500 (0:00:00.028) 0:00:02.422 ********
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" }

TASK [Podman version] **********************************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:171
Saturday 07 March 2026 11:43:16 -0500 (0:00:00.027) 0:00:02.450 ********
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" }

TASK [Create user] *************************************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:176
Saturday 07 March 2026 11:43:16 -0500 (0:00:00.028) 0:00:02.479 ********
changed: [managed-node2] => { "changed": true, "comment": "", "create_home": true, "group": 3001, "home": "/home/podman_basic_user", "name": "podman_basic_user", "shell": "/bin/bash", "state": "present", "system": false, "uid": 3001 }

TASK [Create tempfile for kube_src] ********************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:181
Saturday 07 March 2026 11:43:17 -0500 (0:00:00.581) 0:00:03.060 ********
changed: [managed-node2 -> localhost] => { "changed": true, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/lsr_podman_6snohudj.yml", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 }

TASK [Write kube_file_src] *****************************************************
task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:189
Saturday 07 March 2026 11:43:17 -0500 (0:00:00.274) 0:00:03.335 ********
changed:
[managed-node2 -> localhost] => { "changed": true, "checksum": "655ddda96a42bc941a427b5b886c4c9e2d94ade4", "dest": "/tmp/lsr_podman_6snohudj.yml", "gid": 0, "group": "root", "md5sum": "4a5339b960ad519515e17825f7e9073c", "mode": "0600", "owner": "root", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 710, "src": "/root/.ansible/tmp/ansible-tmp-1772901797.8477888-9341-263601333299479/.source.yml", "state": "file", "uid": 0 } TASK [Create host directories for data] **************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:196 Saturday 07 March 2026 11:43:18 -0500 (0:00:00.638) 0:00:03.973 ******** changed: [managed-node2] => (item=['httpd1', 'podman_basic_user', 3001]) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": [ "httpd1", "podman_basic_user", 3001 ], "mode": "0755", "owner": "podman_basic_user", "path": "/tmp/lsr_od4netlk_podman/httpd1", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 3001 } changed: [managed-node2] => (item=['httpd2', 'root', 0]) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": [ "httpd2", "root", 0 ], "mode": "0755", "owner": "root", "path": "/tmp/lsr_od4netlk_podman/httpd2", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=['httpd3', 'root', 0]) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": [ "httpd3", "root", 0 ], "mode": "0755", "owner": "root", "path": "/tmp/lsr_od4netlk_podman/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [Create data files] ******************************************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:204 Saturday 07 March 2026 11:43:19 -0500 (0:00:01.032) 0:00:05.006 ******** changed: [managed-node2] => (item=['httpd1', 'podman_basic_user', 3001]) => { "ansible_loop_var": "item", "changed": true, "checksum": "40bd001563085fc35165329ea1ff5c5ecbdbbeef", "dest": "/tmp/lsr_od4netlk_podman/httpd1/index.txt", "gid": 0, "group": "root", "item": [ "httpd1", "podman_basic_user", 3001 ], "md5sum": "202cb962ac59075b964b07152d234b70", "mode": "0644", "owner": "podman_basic_user", "secontext": "unconfined_u:object_r:admin_home_t:s0", "size": 3, "src": "/root/.ansible/tmp/ansible-tmp-1772901799.540683-9404-78214707835047/.source.txt", "state": "file", "uid": 3001 } changed: [managed-node2] => (item=['httpd2', 'root', 0]) => { "ansible_loop_var": "item", "changed": true, "checksum": "40bd001563085fc35165329ea1ff5c5ecbdbbeef", "dest": "/tmp/lsr_od4netlk_podman/httpd2/index.txt", "gid": 0, "group": "root", "item": [ "httpd2", "root", 0 ], "md5sum": "202cb962ac59075b964b07152d234b70", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:admin_home_t:s0", "size": 3, "src": "/root/.ansible/tmp/ansible-tmp-1772901800.166391-9404-258267279359070/.source.txt", "state": "file", "uid": 0 } changed: [managed-node2] => (item=['httpd3', 'root', 0]) => { "ansible_loop_var": "item", "changed": true, "checksum": "40bd001563085fc35165329ea1ff5c5ecbdbbeef", "dest": "/tmp/lsr_od4netlk_podman/httpd3/index.txt", "gid": 0, "group": "root", "item": [ "httpd3", "root", 0 ], "md5sum": "202cb962ac59075b964b07152d234b70", "mode": "0644", "owner": "root", "secontext": 
"unconfined_u:object_r:admin_home_t:s0", "size": 3, "src": "/root/.ansible/tmp/ansible-tmp-1772901800.8070908-9404-202203131295519/.source.txt", "state": "file", "uid": 0 } TASK [Run role - do not pull images] ******************************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:214 Saturday 07 March 2026 11:43:21 -0500 (0:00:01.911) 0:00:06.917 ******** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 07 March 2026 11:43:21 -0500 (0:00:00.048) 0:00:06.966 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 07 March 2026 11:43:21 -0500 (0:00:00.022) 0:00:06.988 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 07 March 2026 11:43:21 -0500 (0:00:00.031) 0:00:07.020 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 07 March 2026 11:43:21 -0500 (0:00:00.345) 0:00:07.365 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 07 March 2026 11:43:21 -0500 (0:00:00.021) 0:00:07.387 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 07 March 2026 11:43:22 -0500 (0:00:00.343) 0:00:07.730 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 07 March 2026 11:43:22 -0500 (0:00:00.021) 0:00:07.751 ******** [WARNING]: TASK: fedora.linux_system_roles.podman : Set platform/version specific variables: The loop variable '__vars_file' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. 
skipping: [managed-node2] => (item=RedHat.yml) => { "__vars_file": "RedHat.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "__vars_file": "CentOS.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.podman : Run systemctl] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:52 Saturday 07 March 2026 11:43:22 -0500 (0:00:00.028) 0:00:07.780 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.008695", "end": "2026-03-07 11:43:22.659616", "failed_when_result": false, "rc": 0, "start": "2026-03-07 11:43:22.650921" } STDOUT: running TASK [fedora.linux_system_roles.podman : Require installed systemd] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:60 Saturday 07 March 2026 11:43:22 -0500 (0:00:00.438) 0:00:08.218 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:65 Saturday 07 March 2026 11:43:22 -0500 (0:00:00.035) 0:00:08.253 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 07 March 2026 11:43:22 -0500 (0:00:00.019) 0:00:08.273 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 07 March 2026 11:43:23 -0500 (0:00:01.135) 0:00:09.408 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 07 March 2026 11:43:23 -0500 (0:00:00.034) 0:00:09.443 ******** changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: protobuf-c-1.5.0-6.el10.x86_64", "Installed: 
netavark-2:1.17.2-1.el10.x86_64", "Installed: libgpg-error-1.50-2.el10.x86_64", "Installed: conmon-3:2.2.1-3.el10.x86_64", "Installed: libassuan-2.5.6-6.el10.x86_64", "Installed: npth-1.6-21.el10.x86_64", "Installed: container-selinux-4:2.246.0-1.el10.noarch", "Installed: catatonit-5:0.2.1-3.el10.x86_64", "Installed: aardvark-dns-2:1.17.0-1.el10.x86_64", "Installed: containers-common-6:5.8-2.el10.noarch", "Installed: containers-common-extra-6:5.8-2.el10.noarch", "Installed: podman-7:5.8.0-2.el10.x86_64", "Installed: libksba-1.6.7-2.el10.x86_64", "Installed: podman-sequoia-0.4.0~pqc.2-1.el10.x86_64", "Installed: shadow-utils-subid-2:4.15.0-11.el10.x86_64", "Installed: passt-0^20251210.gd04c480-3.el10.x86_64", "Installed: passt-selinux-0^20251210.gd04c480-3.el10.noarch", "Installed: libnet-1.3-7.el10.x86_64", "Installed: criu-4.2-1.el10.x86_64", "Installed: gnupg2-2.4.5-4.el10.x86_64", "Installed: criu-libs-4.2-1.el10.x86_64", "Installed: crun-1.26-2.el10.x86_64", "Installed: gpgme-1.23.2-6.el10.x86_64", "Installed: libgcrypt-1.11.0-6.el10.x86_64" ] } lsrpackages: podman TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 07 March 2026 11:43:57 -0500 (0:00:33.089) 0:00:42.532 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.035) 0:00:42.567 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.033) 0:00:42.600 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.033) 0:00:42.634 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028472", "end": "2026-03-07 11:43:57.508815", "rc": 0, "start": "2026-03-07 11:43:57.480343" } STDOUT: podman version 5.8.0 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.447) 0:00:43.081 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.8.0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.020) 0:00:43.102 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result 
was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.019) 0:00:43.122 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.033) 0:00:43.155 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.028) 0:00:43.184 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.026) 0:00:43.211 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.021) 0:00:43.232 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:43:57 -0500 (0:00:00.036) 0:00:43.269 ******** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.505) 0:00:43.775 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman 
user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.021) 0:00:43.796 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.029) 0:00:43.826 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1771804800.0, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.420) 0:00:44.247 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.019) 0:00:44.267 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.018) 0:00:44.286 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.019) 0:00:44.305 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.039) 0:00:44.345 ******** skipping: [managed-node2] => { "changed": false, 
"false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.019) 0:00:44.365 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.018) 0:00:44.384 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.018) 0:00:44.403 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.020) 0:00:44.423 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.048) 0:00:44.471 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 07 March 2026 11:43:58 -0500 (0:00:00.034) 0:00:44.505 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.019) 0:00:44.525 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.017) 0:00:44.543 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.034) 0:00:44.578 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.018) 0:00:44.596 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.018) 0:00:44.615 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.036) 0:00:44.652 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.018) 0:00:44.670 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.018) 0:00:44.688 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.039) 0:00:44.728 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.026) 0:00:44.754 
******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.027) 0:00:44.782 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.018) 0:00:44.800 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.018) 0:00:44.819 ******** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.079) 0:00:44.899 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:2 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.030) 0:00:44.929 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:10 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.024) 0:00:44.954 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:15 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.354) 0:00:45.308 ******** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:22 Saturday 07 March 2026 11:43:59 -0500 (0:00:00.025) 0:00:45.333 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:27 Saturday 07 March 2026 11:44:00 -0500 (0:00:00.350) 0:00:45.683 ******** 
ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:31 Saturday 07 March 2026 11:44:00 -0500 (0:00:00.028) 0:00:45.712 ******** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 07 March 2026 11:44:00 -0500 (0:00:00.045) 0:00:45.758 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:5 Saturday 07 March 2026 11:44:00 -0500 (0:00:00.044) 0:00:45.802 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.008656", "end": "2026-03-07 11:44:00.608836", "failed_when_result": false, "rc": 0, "start": "2026-03-07 11:44:00.600180" } STDOUT: running TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:12 Saturday 07 March 2026 11:44:00 -0500 (0:00:00.371) 0:00:46.174 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:17 Saturday 07 March 2026 11:44:00 -0500 (0:00:00.024) 0:00:46.199 ******** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 07 March 2026 11:44:00 -0500 (0:00:00.025) 0:00:46.224 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task 
path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 07 March 2026 11:44:01 -0500 (0:00:01.218) 0:00:47.443 ******** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:39 Saturday 07 March 2026 11:44:01 -0500 (0:00:00.047) 0:00:47.490 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:44 Saturday 07 March 2026 11:44:02 -0500 (0:00:00.020) 0:00:47.510 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:8 Saturday 07 March 2026 11:44:02 -0500 (0:00:00.019) 0:00:47.530 ******** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:17 Saturday 07 March 2026 11:44:02 -0500 (0:00:00.026) 0:00:47.556 ******** skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": 
"iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:27 Saturday 07 March 2026 11:44:02 -0500 (0:00:00.032) 0:00:47.589 ******** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "polkit.service dbus.socket system.slice sysinit.target dbus-broker.service basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "network-pre.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target ebtables.service iptables.service ipset.service ip6tables.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", 
"ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3062312960", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": 
"[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target dbus-broker.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", 
"TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:33 Saturday 07 March 2026 11:44:02 -0500 (0:00:00.770) 0:00:48.359 ******** changed: [managed-node2] => { "changed": true, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "dbus-broker.service system.slice dbus.socket polkit.service basic.target sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "network-pre.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service shutdown.target ebtables.service iptables.service ip6tables.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; 
stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3061940224", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus-broker.service system.slice dbus.socket sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", 
"WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:39 Saturday 07 March 2026 11:44:04 -0500 (0:00:01.505) 0:00:49.865 ******** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:48 Saturday 07 March 2026 11:44:04 -0500 (0:00:00.039) 0:00:49.905 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:61 Saturday 07 March 2026 11:44:04 -0500 (0:00:00.018) 0:00:49.923 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:77 Saturday 07 March 2026 11:44:04 -0500 (0:00:00.041) 0:00:49.964 ******** changed: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.680) 0:00:50.644 ******** skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:141 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.041) 0:00:50.686 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:150 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.027) 0:00:50.714 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:156 Saturday 07 March 
2026 11:44:05 -0500 (0:00:00.020) 0:00:50.734 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:165 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.019) 0:00:50.754 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:176 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.019) 0:00:50.773 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:182 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.017) 0:00:50.790 ******** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.022) 0:00:50.812 ******** redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean included: fedora.linux_system_roles.selinux for managed-node2 TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.135) 0:00:50.948 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.038) 0:00:50.986 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.042) 0:00:51.029 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Saturday 07 March 2026 11:44:05 -0500 
(0:00:00.039) 0:00:51.068 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.359) 0:00:51.427 ******** ok: [managed-node2] => { "ansible_facts": { "__selinux_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Saturday 07 March 2026 11:44:05 -0500 (0:00:00.031) 0:00:51.459 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Saturday 07 March 2026 11:44:06 -0500 (0:00:00.354) 0:00:51.814 ******** ok: [managed-node2] => { "ansible_facts": { "__selinux_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Saturday 07 March 2026 11:44:06 -0500 (0:00:00.032) 0:00:51.846 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['python_version'] is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Saturday 07 March 2026 11:44:06 -0500 (0:00:00.030) 0:00:51.877 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: python3-libselinux python3-policycoreutils TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Saturday 07 March 2026 11:44:07 -0500 (0:00:00.783) 0:00:52.660 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['os_family'] == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure grubby used to modify selinux kernel parameter] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Saturday 07 March 2026 11:44:07 -0500 (0:00:00.030) 0:00:52.690 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: grubby TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:67 Saturday 07 March 2026 11:44:07 -0500 (0:00:00.788) 0:00:53.479 ******** changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: policycoreutils-python-utils-3.10-1.el10.noarch" ] } lsrpackages: policycoreutils-python-utils TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:81 Saturday 07 March 2026 11:44:09 -0500 (0:00:01.251) 0:00:54.730 ******** skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:86 Saturday 07 March 2026 11:44:09 -0500 (0:00:00.049) 0:00:54.779 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:91 Saturday 07 March 2026 11:44:09 -0500 (0:00:00.020) 0:00:54.799 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:98 Saturday 07 March 2026 11:44:09 -0500 (0:00:00.019) 0:00:54.819 ******** ok: [managed-node2] TASK [fedora.linux_system_roles.selinux : Run systemctl] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:8 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.987) 0:00:55.806 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.009267", "end": "2026-03-07 11:44:10.610681", "failed_when_result": false, "rc": 0, "start": "2026-03-07 11:44:10.601414" } STDOUT: running TASK [fedora.linux_system_roles.selinux : Require installed systemd] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:15 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.373) 0:00:56.180 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:20 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.024) 0:00:56.205 ******** ok: [managed-node2] => { "ansible_facts": { "__selinux_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:29 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.026) 0:00:56.231 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(not selinux_state is none and selinux_state | length > 0) or (not selinux_policy is none and selinux_policy | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:40 Saturday 07 March 2026 
11:44:10 -0500 (0:00:00.028) 0:00:56.260 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:52 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.023) 0:00:56.283 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [Add or remove selinux=0 from args as needed] ***************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:56 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.026) 0:00:56.309 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __update_kernel_param", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:70 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.026) 0:00:56.336 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:77 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.016) 0:00:56.352 ******** skipping: [managed-node2] => { "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:82 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.022) 0:00:56.374 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:90 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.017) 0:00:56.392 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:95 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.017) 0:00:56.410 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:100 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.016) 0:00:56.427 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:105 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.019) 0:00:56.446 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:110 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.017) 0:00:56.463 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:121 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.016) 0:00:56.480 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:134 Saturday 07 March 2026 11:44:10 -0500 (0:00:00.017) 0:00:56.497 ******** changed: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": true, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:146 Saturday 07 March 2026 11:44:12 -0500 (0:00:01.279) 0:00:57.777 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 Saturday 07 March 2026 11:44:12 -0500 (0:00:00.022) 0:00:57.800 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": 
"sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { 
"checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { 
"100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": 
{ "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { 
"checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": 
"sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { 
"checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { 
"checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { "200": { "checksum": "sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": 
{ "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": 
"sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": 
"sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { 
"checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, 
"usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Set SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:164 Saturday 07 March 2026 11:44:15 -0500 (0:00:02.813) 0:01:00.614 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": 
true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": 
"sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": 
{ "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { 
"checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { 
"checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { 
"checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { 
"checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": 
"sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { "200": { "checksum": "sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { 
"100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { 
"400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": 
"sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, 
"stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { 
"100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", 
"enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:170 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.085) 0:01:00.699 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:183 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.027) 0:01:00.726 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:191 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.021) 0:01:00.748 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.030) 0:01:00.779 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.021) 0:01:00.800 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.029) 0:01:00.830 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Saturday 07 March 2026 
11:44:15 -0500 (0:00:00.024) 0:01:00.854 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.022) 0:01:00.877 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.118) 0:01:00.995 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "created" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: nopull\nspec:\n containers:\n - name: nopull\n image: quay.io/libpod/testimage:20210610\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.036) 0:01:01.032 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "nopull" }, "spec": { "containers": [ { "image": "quay.io/libpod/testimage:20210610", "name": "nopull" } ] } }, "__podman_kube_file": "", "__podman_pull_image": false, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.036) 0:01:01.068 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_name": "nopull", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:42 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.035) 0:01:01.104 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.041) 0:01:01.145 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_handle_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.027) 0:01:01.172 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.028) 0:01:01.201 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:44:15 -0500 (0:00:00.047) 0:01:01.249 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.383) 0:01:01.633 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.024) 0:01:01.657 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.024) 0:01:01.681 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.024) 0:01:01.706 ******** skipping: [managed-node2] => { "changed": false, "false_condition": 
"not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.023) 0:01:01.730 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.022) 0:01:01.753 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.023) 0:01:01.776 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.022) 0:01:01.799 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:48 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.022) 0:01:01.821 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube is none or __podman_kube | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:57 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.025) 0:01:01.847 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_systemd_scope": "system", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.033) 0:01:01.880 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:70 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.032) 0:01:01.912 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/nopull.yml" }, "changed": false } 
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:74 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.026) 0:01:01.939 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:81 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.026) 0:01:01.966 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:89 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.021) 0:01:01.987 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:93 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.019) 0:01:02.007 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.083) 0:01:02.090 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:13 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.036) 0:01:02.127 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.022) 0:01:02.150 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.021) 0:01:02.171 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.020) 0:01:02.192 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'volumes' in __podman_kube['spec']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.022) 0:01:02.214 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_volumes | d([]) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.023) 0:01:02.238 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:2 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.038) 0:01:02.276 ******** skipping: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle images when not booted] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:25 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.027) 0:01:02.303 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_booted", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:32 Saturday 07 March 2026 11:44:16 -0500 (0:00:00.020) 0:01:02.324 ******** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:40 Saturday 07 March 2026 11:44:17 -0500 (0:00:00.373) 0:01:02.697 ******** changed: [managed-node2] => { "changed": true, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:51 Saturday 07 March 2026 11:44:17 -0500 (0:00:00.386) 0:01:03.084 ******** changed: [managed-node2] => { "changed": true, "checksum": "d5dc917e3cae36de03aa971a17ac473f86fdf934", "dest": 
"/etc/containers/ansible-kubernetes.d/nopull.yml", "gid": 0, "group": "root", "md5sum": "1eceaf0da0bbf69a778deb11f0449417", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 217, "src": "/root/.ansible/tmp/ansible-tmp-1772901857.642029-10578-203368904599909/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:63 Saturday 07 March 2026 11:44:18 -0500 (0:00:00.709) 0:01:03.793 ******** changed: [managed-node2] => { "actions": [ "/usr/bin/podman play kube --start=false /etc/containers/ansible-kubernetes.d/nopull.yml" ], "changed": true } STDOUT: Pod: 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 Container: d4fc0055deaed372cb505f1296fe8e33f059d4ac3adf6ad0c54243b547cbb4c4 STDERR: Trying to pull quay.io/libpod/testimage:20210610... Getting image source signatures Copying blob sha256:9afcdfe780b4ea44cc52d22e3f93ccf212388a90370773571ce034a62e14174e Copying config sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f Writing manifest to image destination TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:72 Saturday 07 March 2026 11:44:19 -0500 (0:00:01.474) 0:01:05.267 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Enable service] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:85 Saturday 07 March 2026 11:44:19 -0500 (0:00:00.030) 0:01:05.298 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:98 Saturday 07 March 2026 11:44:19 -0500 (0:00:00.032) 0:01:05.330 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:113 Saturday 07 March 2026 11:44:19 -0500 (0:00:00.033) 0:01:05.364 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Saturday 07 March 2026 11:44:19 -0500 (0:00:00.031) 0:01:05.395 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Saturday 07 March 2026 11:44:19 -0500 (0:00:00.019) 0:01:05.415 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:205 Saturday 07 March 2026 11:44:19 -0500 (0:00:00.017) 0:01:05.432 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:214 Saturday 07 March 2026 11:44:19 -0500 (0:00:00.026) 0:01:05.459 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Verify image not pulled] ************************************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:235 Saturday 07 March 2026 11:44:19 -0500 (0:00:00.036) 0:01:05.495 ******** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Run role - verify continue if pull fails] ******************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:239 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.060) 0:01:05.555 ******** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.096) 0:01:05.651 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.059) 0:01:05.711 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.051) 0:01:05.762 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.038) 0:01:05.801 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in 
/sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.040) 0:01:05.841 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.039) 0:01:05.881 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.039) 0:01:05.920 ******** skipping: [managed-node2] => (item=RedHat.yml) => { "__vars_file": "RedHat.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "__vars_file": "CentOS.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.podman : Run systemctl] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:52 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.096) 0:01:06.017 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Require installed systemd] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:60 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.039) 0:01:06.056 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:65 Saturday 07 March 2026 11:44:20 -0500 (0:00:00.038) 0:01:06.095 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 07 March 2026 11:44:20 
-0500 (0:00:00.040) 0:01:06.136 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 07 March 2026 11:44:21 -0500 (0:00:00.998) 0:01:07.134 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 07 March 2026 11:44:21 -0500 (0:00:00.022) 0:01:07.156 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 07 March 2026 11:44:21 -0500 (0:00:00.034) 0:01:07.191 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 07 March 2026 11:44:21 -0500 (0:00:00.024) 0:01:07.216 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 07 March 2026 11:44:21 -0500 (0:00:00.035) 0:01:07.251 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 07 March 2026 11:44:21 -0500 (0:00:00.034) 0:01:07.286 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.021466", "end": "2026-03-07 11:44:22.126906", "rc": 0, "start": "2026-03-07 11:44:22.105440" } STDOUT: podman version 5.8.0 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.407) 0:01:07.694 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.8.0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.069) 0:01:07.763 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.029) 0:01:07.793 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.024) 0:01:07.817 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.017) 0:01:07.835 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.031) 0:01:07.866 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.026) 0:01:07.893 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.042) 0:01:07.935 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_handle_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.027) 0:01:07.963 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.030) 0:01:07.994 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.053) 0:01:08.047 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.413) 0:01:08.461 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:44:22 -0500 (0:00:00.042) 0:01:08.503 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.034) 0:01:08.538 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.033) 0:01:08.571 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.047) 0:01:08.619 ******** 
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.023) 0:01:08.643 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.024) 0:01:08.667 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.022) 0:01:08.690 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.024) 0:01:08.715 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.060) 0:01:08.776 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.041) 0:01:08.817 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.022) 0:01:08.839 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.058) 0:01:08.898 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.058) 0:01:08.956 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.038) 0:01:08.994 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.037) 0:01:09.031 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.071) 0:01:09.103 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.038) 0:01:09.141 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.037) 0:01:09.179 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.077) 0:01:09.256 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.036) 0:01:09.293 
******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.035) 0:01:09.329 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.036) 0:01:09.365 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Saturday 07 March 2026 11:44:23 -0500 (0:00:00.035) 0:01:09.400 ******** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.119) 0:01:09.520 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:2 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.043) 0:01:09.563 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:10 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.040) 0:01:09.604 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_ostree is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:15 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.041) 0:01:09.646 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_ostree is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:22 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.032) 0:01:09.678 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : 
Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:27 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.026) 0:01:09.705 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:31 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.025) 0:01:09.730 ******** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.145) 0:01:09.875 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:5 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.044) 0:01:09.920 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:12 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.027) 0:01:09.947 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:17 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.026) 0:01:09.974 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 07 March 2026 11:44:24 -0500 (0:00:00.027) 
0:01:10.001 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 07 March 2026 11:44:25 -0500 (0:00:00.798) 0:01:10.800 ******** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:39 Saturday 07 March 2026 11:44:25 -0500 (0:00:00.023) 0:01:10.824 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:44 Saturday 07 March 2026 11:44:25 -0500 (0:00:00.022) 0:01:10.846 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:8 Saturday 07 March 2026 11:44:25 -0500 (0:00:00.022) 0:01:10.869 ******** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:17 Saturday 07 March 2026 11:44:25 -0500 (0:00:00.028) 0:01:10.898 ******** skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { 
"ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:27 Saturday 07 March 2026 11:44:25 -0500 (0:00:00.029) 0:01:10.927 ******** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2026-03-07 11:44:04 EST", "ActiveEnterTimestampMonotonic": "560506770", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket dbus-broker.service basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2026-03-07 11:44:03 EST", "AssertTimestampMonotonic": "559785386", "Before": "network-pre.target shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "426720000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2026-03-07 11:44:03 EST", "ConditionTimestampMonotonic": "559785383", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service iptables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4787", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", 
"DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainHandoffTimestampMonotonic": "559816912", "ExecMainPID": "14191", "ExecMainStartTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainStartTimestampMonotonic": "559787917", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2026-03-07 11:44:03 EST", "InactiveExitTimestampMonotonic": "559788901", "InvocationID": "36851f8a440d4a018e15c301930b223c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": 
"infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "14191", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3015180288", "MemoryCurrent": "34013184", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34250752", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket dbus-broker.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", 
"StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2026-03-07 11:44:04 EST", "StateChangeTimestampMonotonic": "560506770", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:33 Saturday 07 March 2026 11:44:25 -0500 (0:00:00.556) 0:01:11.484 ******** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2026-03-07 11:44:04 EST", "ActiveEnterTimestampMonotonic": "560506770", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket dbus-broker.service basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2026-03-07 11:44:03 EST", "AssertTimestampMonotonic": "559785386", "Before": "network-pre.target shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "426720000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2026-03-07 11:44:03 EST", "ConditionTimestampMonotonic": "559785383", "ConfigurationDirectoryMode": "0755", "Conflicts": 
"ebtables.service ip6tables.service iptables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4787", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainHandoffTimestampMonotonic": "559816912", "ExecMainPID": "14191", "ExecMainStartTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainStartTimestampMonotonic": "559787917", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2026-03-07 11:44:03 EST", "InactiveExitTimestampMonotonic": "559788901", "InvocationID": "36851f8a440d4a018e15c301930b223c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", 
"LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "14191", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3031220224", "MemoryCurrent": "34013184", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34250752", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket dbus-broker.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": 
"system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2026-03-07 11:44:04 EST", "StateChangeTimestampMonotonic": "560506770", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:39 Saturday 07 March 2026 11:44:26 -0500 (0:00:00.567) 0:01:12.052 ******** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:48 Saturday 07 March 2026 11:44:26 -0500 (0:00:00.072) 0:01:12.124 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:61 Saturday 07 March 2026 11:44:26 -0500 (0:00:00.035) 0:01:12.160 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:77 Saturday 07 March 2026 11:44:26 -0500 (0:00:00.034) 0:01:12.194 ******** ok: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.581) 0:01:12.776 ******** skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:141 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.064) 0:01:12.841 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:150 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.045) 0:01:12.886 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:156 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.037) 0:01:12.924 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:165 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.041) 0:01:12.966 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:176 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.035) 0:01:13.001 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:182 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.034) 0:01:13.036 ******** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.049) 0:01:13.085 ******** redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean included: fedora.linux_system_roles.selinux for 
managed-node2 TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.220) 0:01:13.305 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.036) 0:01:13.341 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.035) 0:01:13.377 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.045) 0:01:13.423 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.026) 0:01:13.449 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.025) 0:01:13.474 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Saturday 07 March 2026 11:44:27 -0500 (0:00:00.024) 0:01:13.499 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Saturday 07 March 2026 11:44:28 -0500 (0:00:00.026) 0:01:13.526 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['python_version'] is version('3', '<')", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Saturday 07 March 2026 11:44:28 -0500 (0:00:00.029) 0:01:13.555 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: python3-libselinux python3-policycoreutils TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Saturday 07 March 2026 11:44:28 -0500 (0:00:00.823) 0:01:14.378 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['os_family'] == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure grubby used to modify selinux kernel parameter] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Saturday 07 March 2026 11:44:28 -0500 (0:00:00.061) 0:01:14.440 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: grubby TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:67 Saturday 07 March 2026 11:44:29 -0500 (0:00:00.841) 0:01:15.281 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: policycoreutils-python-utils TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:81 Saturday 07 March 2026 11:44:30 -0500 (0:00:00.814) 0:01:16.095 ******** skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:86 Saturday 07 March 2026 11:44:30 -0500 (0:00:00.023) 0:01:16.119 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:91 Saturday 07 March 2026 11:44:30 -0500 (0:00:00.023) 0:01:16.142 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:98 Saturday 07 March 2026 11:44:30 -0500 (0:00:00.021) 0:01:16.164 ******** ok: [managed-node2] TASK [fedora.linux_system_roles.selinux : Run systemctl] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:8 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.852) 0:01:17.017 ******** 
skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Require installed systemd] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:15 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.028) 0:01:17.045 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:20 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.024) 0:01:17.070 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:29 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.026) 0:01:17.097 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(not selinux_state is none and selinux_state | length > 0) or (not selinux_policy is none and selinux_policy | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:40 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.033) 0:01:17.130 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:52 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.064) 0:01:17.195 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [Add or remove selinux=0 from args as needed] ***************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:56 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.030) 0:01:17.225 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __update_kernel_param", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:70 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.036) 0:01:17.262 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:77 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.035) 0:01:17.298 ******** skipping: [managed-node2] => { "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"" } TASK 
[fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:82 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.045) 0:01:17.343 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:90 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.035) 0:01:17.379 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:95 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.032) 0:01:17.412 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:100 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.033) 0:01:17.446 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:105 Saturday 07 March 2026 11:44:31 -0500 (0:00:00.032) 0:01:17.478 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:110 Saturday 07 March 2026 11:44:32 -0500 (0:00:00.033) 0:01:17.512 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:121 Saturday 07 March 2026 11:44:32 -0500 (0:00:00.033) 0:01:17.545 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:134 Saturday 07 March 2026 11:44:32 -0500 (0:00:00.031) 0:01:17.576 ******** ok: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:146 Saturday 07 March 2026 11:44:32 -0500 (0:00:00.616) 0:01:18.193 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 Saturday 07 March 2026 11:44:32 -0500 (0:00:00.020) 0:01:18.214 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": 
{ "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, 
"conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } 
}, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, 
"gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { 
"100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { 
"100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { 
"100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { "200": { "checksum": "sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, 
"pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, 
"realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { 
"checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, 
"sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { 
"100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", 
"enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Set SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:164 Saturday 07 March 2026 11:44:35 -0500 (0:00:02.761) 0:01:20.976 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": 
"sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { 
"checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { 
"checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { 
"checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { 
"checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { 
"checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { 
"checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { 
"200": { "checksum": "sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { 
"100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { 
"checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { 
"checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { 
"100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, 
"vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:170 Saturday 07 March 2026 11:44:35 -0500 (0:00:00.169) 0:01:21.145 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:183 Saturday 07 March 2026 11:44:35 -0500 (0:00:00.042) 0:01:21.187 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux 
labels on filesystem tree in check mode] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:191 Saturday 07 March 2026 11:44:35 -0500 (0:00:00.035) 0:01:21.223 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Saturday 07 March 2026 11:44:35 -0500 (0:00:00.047) 0:01:21.270 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Saturday 07 March 2026 11:44:35 -0500 (0:00:00.037) 0:01:21.308 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Saturday 07 March 2026 11:44:35 -0500 (0:00:00.032) 0:01:21.340 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Saturday 07 March 2026 11:44:35 -0500 (0:00:00.033) 0:01:21.373 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Saturday 07 March 2026 11:44:35 -0500 (0:00:00.029) 0:01:21.403 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.192) 0:01:21.595 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "created" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: bogus\nspec:\n containers:\n - name: bogus\n image: >-\n quay.io/linux-system-roles/this_is_a_bogus_image:latest\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.046) 0:01:21.641 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": true, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "bogus" }, "spec": { 
"containers": [ { "image": "quay.io/linux-system-roles/this_is_a_bogus_image:latest", "name": "bogus" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.046) 0:01:21.688 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/this_is_a_bogus_image:latest" ], "__podman_kube_name": "bogus", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:42 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.046) 0:01:21.734 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.046) 0:01:21.781 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_handle_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.029) 0:01:21.811 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.027) 0:01:21.839 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.040) 0:01:21.879 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": 
false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.396) 0:01:22.276 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.041) 0:01:22.317 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.039) 0:01:22.357 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.039) 0:01:22.397 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.026) 0:01:22.423 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.027) 0:01:22.451 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:44:36 -0500 (0:00:00.029) 0:01:22.480 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.029) 0:01:22.510 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:48 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.026) 0:01:22.537 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube is none or __podman_kube | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:57 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.029) 0:01:22.566 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_systemd_scope": "system", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.035) 0:01:22.602 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:70 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.035) 0:01:22.637 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/bogus.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:74 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.028) 0:01:22.665 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:81 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.030) 0:01:22.695 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:89 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.020) 0:01:22.716 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:93 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.025) 0:01:22.741 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] 
************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.131) 0:01:22.872 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:13 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.062) 0:01:22.935 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.038) 0:01:22.973 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.037) 0:01:23.011 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.039) 0:01:23.050 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'volumes' in __podman_kube['spec']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.034) 0:01:23.084 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_volumes | d([]) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.041) 0:01:23.126 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:2 Saturday 07 March 2026 11:44:37 -0500 (0:00:00.070) 0:01:23.197 ******** ok: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle images 
when not booted] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:25 Saturday 07 March 2026 11:44:38 -0500 (0:00:00.766) 0:01:23.964 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_booted", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:32 Saturday 07 March 2026 11:44:38 -0500 (0:00:00.029) 0:01:23.993 ******** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:40 Saturday 07 March 2026 11:44:38 -0500 (0:00:00.367) 0:01:24.360 ******** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 24, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:51 Saturday 07 March 2026 11:44:39 -0500 (0:00:00.385) 0:01:24.746 ******** changed: [managed-node2] => { "changed": true, "checksum": "f8266a972ed3be7e204d2a67883fe3a22b8dbf18", "dest": "/etc/containers/ansible-kubernetes.d/bogus.yml", "gid": 0, "group": "root", "md5sum": "22799c1e99f8fb14db15efc7dbc7ba4c", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 237, "src": "/root/.ansible/tmp/ansible-tmp-1772901879.29425-11520-176634290947885/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:63 Saturday 07 March 2026 11:44:39 -0500 (0:00:00.692) 0:01:25.438 ******** fatal: [managed-node2]: FAILED! => { "changed": false } MSG: Output: Error=Trying to pull quay.io/linux-system-roles/this_is_a_bogus_image:latest... 
Error: unable to copy from source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized TASK [Verify image not pulled] ************************************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:262 Saturday 07 March 2026 11:44:40 -0500 (0:00:00.670) 0:01:26.108 ******** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Cleanup] ***************************************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:269 Saturday 07 March 2026 11:44:40 -0500 (0:00:00.028) 0:01:26.137 ******** included: fedora.linux_system_roles.podman for managed-node2 => (item=nopull) included: fedora.linux_system_roles.podman for managed-node2 => (item=bogus) TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 07 March 2026 11:44:40 -0500 (0:00:00.170) 0:01:26.307 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 07 March 2026 11:44:40 -0500 (0:00:00.063) 0:01:26.371 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 07 March 2026 11:44:41 -0500 (0:00:00.138) 0:01:26.509 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 07 March 2026 11:44:41 -0500 (0:00:00.026) 0:01:26.536 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 07 March 2026 11:44:41 -0500 (0:00:00.026) 0:01:26.563 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 07 March 2026 11:44:41 -0500 (0:00:00.025) 0:01:26.588 ******** skipping: [managed-node2] => { "changed": false, 
"false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 07 March 2026 11:44:41 -0500 (0:00:00.026) 0:01:26.615 ******** skipping: [managed-node2] => (item=RedHat.yml) => { "__vars_file": "RedHat.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "__vars_file": "CentOS.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.podman : Run systemctl] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:52 Saturday 07 March 2026 11:44:41 -0500 (0:00:00.057) 0:01:26.673 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Require installed systemd] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:60 Saturday 07 March 2026 11:44:41 -0500 (0:00:00.026) 0:01:26.699 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:65 Saturday 07 March 2026 11:44:41 -0500 (0:00:00.027) 0:01:26.727 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 07 March 2026 11:44:41 -0500 (0:00:00.025) 0:01:26.753 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.978) 0:01:27.731 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are 
installed] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.024) 0:01:27.756 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.031) 0:01:27.787 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.022) 0:01:27.810 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.029) 0:01:27.839 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.022) 0:01:27.862 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.021527", "end": "2026-03-07 11:44:42.692475", "rc": 0, "start": "2026-03-07 11:44:42.670948" } STDOUT: podman version 5.8.0 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.399) 0:01:28.262 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.8.0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.025) 0:01:28.288 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.025) 0:01:28.314 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.023) 0:01:28.337 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.018) 0:01:28.356 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.046) 0:01:28.402 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 07 March 2026 11:44:42 -0500 (0:00:00.073) 0:01:28.475 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.045) 0:01:28.521 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_handle_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.028) 0:01:28.549 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.030) 0:01:28.580 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:44:43 -0500 
(0:00:00.038) 0:01:28.619 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.369) 0:01:28.988 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.031) 0:01:29.020 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.040) 0:01:29.060 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.038) 0:01:29.099 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.036) 0:01:29.136 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.040) 0:01:29.176 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.038) 0:01:29.215 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.039) 0:01:29.254 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.039) 0:01:29.293 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Saturday 07 March 2026 11:44:43 -0500 (0:00:00.105) 0:01:29.399 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.111) 0:01:29.510 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.043) 0:01:29.553 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.037) 0:01:29.591 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.074) 0:01:29.665 ******** skipping: 
[managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.031) 0:01:29.696 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.027) 0:01:29.724 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.100) 0:01:29.824 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.026) 0:01:29.850 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.023) 0:01:29.874 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.046) 0:01:29.920 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.022) 0:01:29.943 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.024) 0:01:29.967 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.022) 0:01:29.990 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.023) 0:01:30.014 ******** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.136) 0:01:30.150 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:2 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.066) 0:01:30.216 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:10 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.053) 0:01:30.269 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_ostree is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:15 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.042) 0:01:30.311 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_ostree is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:22 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.043) 0:01:30.355 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:27 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.040) 0:01:30.395 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:31 Saturday 07 March 2026 11:44:44 -0500 (0:00:00.042) 0:01:30.438 ******** [WARNING]: TASK: fedora.linux_system_roles.firewall : Set platform/version specific variables: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 07 March 2026 11:44:45 -0500 (0:00:00.138) 0:01:30.576 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:5 Saturday 07 March 2026 11:44:45 -0500 (0:00:00.075) 0:01:30.651 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:12 Saturday 07 March 2026 11:44:45 -0500 (0:00:00.044) 0:01:30.696 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:17 Saturday 07 March 2026 11:44:45 -0500 (0:00:00.042) 0:01:30.738 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 07 March 2026 11:44:45 -0500 (0:00:00.044) 0:01:30.782 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to 
apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 07 March 2026 11:44:46 -0500 (0:00:00.978) 0:01:31.761 ******** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:39 Saturday 07 March 2026 11:44:46 -0500 (0:00:00.029) 0:01:31.790 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:44 Saturday 07 March 2026 11:44:46 -0500 (0:00:00.028) 0:01:31.819 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:8 Saturday 07 March 2026 11:44:46 -0500 (0:00:00.027) 0:01:31.847 ******** [WARNING]: TASK: fedora.linux_system_roles.firewall : Check which conflicting services are enabled: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:17 Saturday 07 March 2026 11:44:46 -0500 (0:00:00.033) 0:01:31.880 ******** [WARNING]: TASK: fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. 
skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:27 Saturday 07 March 2026 11:44:46 -0500 (0:00:00.033) 0:01:31.914 ******** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2026-03-07 11:44:04 EST", "ActiveEnterTimestampMonotonic": "560506770", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket dbus-broker.service basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2026-03-07 11:44:03 EST", "AssertTimestampMonotonic": "559785386", "Before": "network-pre.target shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "457654000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin 
cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2026-03-07 11:44:03 EST", "ConditionTimestampMonotonic": "559785383", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service iptables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4787", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainHandoffTimestampMonotonic": "559816912", "ExecMainPID": "14191", "ExecMainStartTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainStartTimestampMonotonic": "559787917", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2026-03-07 11:44:03 EST", "InactiveExitTimestampMonotonic": "559788901", "InvocationID": "36851f8a440d4a018e15c301930b223c", 
"JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "14191", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3028533248", "MemoryCurrent": "34062336", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34328576", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket dbus-broker.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": 
"success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2026-03-07 11:44:04 EST", "StateChangeTimestampMonotonic": "560506770", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:33 Saturday 07 March 2026 11:44:46 -0500 (0:00:00.520) 0:01:32.435 ******** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2026-03-07 11:44:04 EST", "ActiveEnterTimestampMonotonic": "560506770", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket dbus-broker.service basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2026-03-07 11:44:03 EST", "AssertTimestampMonotonic": "559785386", "Before": "network-pre.target shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "457654000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": 
"yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2026-03-07 11:44:03 EST", "ConditionTimestampMonotonic": "559785383", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service iptables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4787", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainHandoffTimestampMonotonic": "559816912", "ExecMainPID": "14191", "ExecMainStartTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainStartTimestampMonotonic": "559787917", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": 
"firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2026-03-07 11:44:03 EST", "InactiveExitTimestampMonotonic": "559788901", "InvocationID": "36851f8a440d4a018e15c301930b223c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "14191", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3045609472", "MemoryCurrent": "34062336", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34328576", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket dbus-broker.service", 
"Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2026-03-07 11:44:04 EST", "StateChangeTimestampMonotonic": "560506770", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:39 Saturday 07 March 2026 11:44:47 -0500 (0:00:00.551) 0:01:32.986 ******** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:48 Saturday 07 March 2026 11:44:47 -0500 (0:00:00.069) 0:01:33.056 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:61 Saturday 07 March 2026 11:44:47 -0500 (0:00:00.037) 
0:01:33.093 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:77 Saturday 07 March 2026 11:44:47 -0500 (0:00:00.033) 0:01:33.127 ******** [WARNING]: TASK: fedora.linux_system_roles.firewall : Configure firewall: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. ok: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.556) 0:01:33.684 ******** [WARNING]: TASK: fedora.linux_system_roles.firewall : Gather firewall config information: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:141 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.045) 0:01:33.729 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:150 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.038) 0:01:33.768 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:156 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.041) 0:01:33.810 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:165 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.040) 0:01:33.850 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:176 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.038) 0:01:33.888 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:182 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.035) 0:01:33.924 ******** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.051) 0:01:33.976 ******** redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean included: fedora.linux_system_roles.selinux for managed-node2 TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.188) 0:01:34.164 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.054) 0:01:34.219 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.115) 0:01:34.334 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.072) 0:01:34.407 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.039) 0:01:34.446 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Saturday 07 March 2026 11:44:48 -0500 (0:00:00.037) 0:01:34.483 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Saturday 07 March 2026 11:44:49 -0500 (0:00:00.040) 0:01:34.524 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Saturday 07 March 2026 11:44:49 -0500 (0:00:00.042) 0:01:34.567 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['python_version'] is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Saturday 07 March 2026 11:44:49 -0500 (0:00:00.053) 0:01:34.621 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: python3-libselinux python3-policycoreutils TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Saturday 07 March 2026 11:44:49 -0500 (0:00:00.835) 0:01:35.456 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['os_family'] == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure grubby used to modify selinux kernel parameter] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Saturday 07 March 2026 11:44:49 -0500 (0:00:00.040) 0:01:35.496 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: grubby TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:67 Saturday 07 March 2026 11:44:50 -0500 (0:00:00.792) 0:01:36.289 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: policycoreutils-python-utils TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:81 Saturday 07 March 2026 11:44:51 -0500 (0:00:00.812) 0:01:37.102 ******** skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:86 Saturday 07 March 2026 11:44:51 -0500 (0:00:00.041) 0:01:37.143 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:91 Saturday 07 March 2026 11:44:51 -0500 (0:00:00.040) 0:01:37.184 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:98 Saturday 07 March 2026 11:44:51 -0500 (0:00:00.042) 0:01:37.226 ******** ok: [managed-node2] TASK [fedora.linux_system_roles.selinux : Run systemctl] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:8 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.887) 0:01:38.114 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Require installed systemd] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:15 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.042) 0:01:38.157 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:20 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.043) 0:01:38.200 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:29 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.042) 0:01:38.243 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(not selinux_state is none and selinux_state | length > 0) or (not selinux_policy is none and selinux_policy | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:40 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.055) 0:01:38.298 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:52 Saturday 07 March 2026 11:44:52 
-0500 (0:00:00.034) 0:01:38.332 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [Add or remove selinux=0 from args as needed] ***************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:56 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.039) 0:01:38.371 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __update_kernel_param", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:70 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.041) 0:01:38.413 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:77 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.023) 0:01:38.437 ******** skipping: [managed-node2] => { "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:82 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.031) 0:01:38.469 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:90 Saturday 07 March 2026 11:44:52 -0500 (0:00:00.022) 0:01:38.492 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:95 Saturday 07 March 2026 11:44:53 -0500 (0:00:00.068) 0:01:38.560 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:100 Saturday 07 March 2026 11:44:53 -0500 (0:00:00.023) 0:01:38.584 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:105 Saturday 07 March 2026 11:44:53 -0500 (0:00:00.022) 0:01:38.606 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:110 Saturday 07 March 2026 11:44:53 -0500 (0:00:00.027) 0:01:38.633 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:121 Saturday 07 March 2026 11:44:53 -0500 (0:00:00.021) 0:01:38.655 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:134 Saturday 07 March 2026 11:44:53 -0500 (0:00:00.034) 0:01:38.689 ******** ok: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:146 Saturday 07 March 2026 11:44:53 -0500 (0:00:00.634) 0:01:39.323 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 Saturday 07 March 2026 11:44:53 -0500 (0:00:00.021) 0:01:39.345 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } 
}, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", 
"enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", 
"enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", 
"enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", 
"enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", 
"enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", 
"enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": 
"sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { "200": { "checksum": "sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": 
"sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": 
"sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": 
"sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { 
"checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": 
{ "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Set SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:164 Saturday 07 March 2026 11:44:56 -0500 (0:00:02.743) 0:01:42.089 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": 
"sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": 
"sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { 
"100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { 
"checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { 
"checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { 
"checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { 
"100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": 
"sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { "200": { "checksum": "sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { 
"100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { 
"checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": 
"sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { 
"100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, 
"unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", 
"enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:170 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.088) 0:01:42.178 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:183 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.029) 0:01:42.207 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:191 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.021) 0:01:42.229 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.030) 0:01:42.259 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.022) 0:01:42.282 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.020) 0:01:42.303 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.018) 0:01:42.321 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.020) 0:01:42.342 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.075) 0:01:42.417 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "absent" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: nopull\nspec:\n containers:\n - name: nopull\n image: quay.io/libpod/testimage:20210610\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.036) 0:01:42.454 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "nopull" }, "spec": { "containers": [ { "image": "quay.io/libpod/testimage:20210610", "name": "nopull" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Saturday 07 March 2026 11:44:56 -0500 (0:00:00.039) 0:01:42.493 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_name": "nopull", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:42 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.037) 0:01:42.530 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.091) 0:01:42.621 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_handle_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.052) 0:01:42.674 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.034) 0:01:42.708 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.042) 0:01:42.751 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.383) 0:01:43.134 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.025) 0:01:43.160 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.025) 0:01:43.186 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.024) 0:01:43.210 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.023) 0:01:43.234 ******** 
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.025) 0:01:43.260 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.023) 0:01:43.283 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.024) 0:01:43.308 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:48 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.023) 0:01:43.332 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube is none or __podman_kube | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:57 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.028) 0:01:43.360 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_systemd_scope": "system", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.040) 0:01:43.400 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:70 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.042) 0:01:43.443 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/nopull.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:74 Saturday 07 March 2026 11:44:57 -0500 (0:00:00.044) 0:01:43.488 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 
'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:81 Saturday 07 March 2026 11:44:58 -0500 (0:00:00.033) 0:01:43.522 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/etc/containers/ansible-kubernetes.d/nopull.yml" ], "delta": "0:00:00.005678", "end": "2026-03-07 11:44:58.326621", "rc": 0, "start": "2026-03-07 11:44:58.320943" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:89 Saturday 07 March 2026 11:44:58 -0500 (0:00:00.373) 0:01:43.895 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:2 Saturday 07 March 2026 11:44:58 -0500 (0:00:00.050) 0:01:43.945 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:10 Saturday 07 March 2026 11:44:58 -0500 (0:00:00.025) 0:01:43.971 ******** ok: [managed-node2] => { "changed": false, "enabled": false, "failed_when_result": false, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-nopull.yml.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount network-online.target systemd-journald.socket basic.target \"system-podman\\\\x2dkube.slice\" sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", 
"CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "\"man:podman-kube-play(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "Environment": "\"PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-nopull.yml.service\"", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/nopull.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/nopull.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/nopull.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/nopull.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-nopull.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", 
"LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3067887616", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-nopull.yml.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target \"system-podman\\\\x2dkube.slice\" -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", 
"SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 10s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Check if kube file exists] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:30 Saturday 07 March 2026 11:44:59 -0500 (0:00:00.570) 0:01:44.542 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901858.8279026, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d5dc917e3cae36de03aa971a17ac473f86fdf934", "ctime": 1772901858.2305999, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 117440778, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1772901857.962456, "nlink": 1, "path": "/etc/containers/ansible-kubernetes.d/nopull.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 217, "uid": 0, "version": "4109009606", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Remove pod/containers] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:40 Saturday 07 March 2026 11:44:59 -0500 (0:00:00.391) 0:01:44.934 ******** changed: [managed-node2] => { "actions": [ "/usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/nopull.yml" ], "changed": true, "failed_when_result": false } STDOUT: Pods stopped: 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 Pods removed: 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 Secrets removed: Volumes removed: TASK [fedora.linux_system_roles.podman : Remove kubernetes yaml file] ********** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:59 Saturday 07 March 2026 11:44:59 -0500 (0:00:00.555) 0:01:45.489 ******** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/ansible-kubernetes.d/nopull.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:64 Saturday 07 March 2026 11:45:00 -0500 (0:00:00.447) 0:01:45.936 ******** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "-f" ], "delta": "0:00:00.028742", "end": "2026-03-07 11:45:00.815134", "rc": 0, "start": "2026-03-07 11:45:00.786392" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:75 Saturday 07 March 2026 11:45:00 -0500 (0:00:00.461) 0:01:46.398 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:13 Saturday 07 March 2026 11:45:00 -0500 (0:00:00.062) 0:01:46.460 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 07 March 2026 11:45:00 -0500 (0:00:00.041) 0:01:46.502 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.037) 0:01:46.539 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:93 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.037) 0:01:46.577 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.037) 0:01:46.615 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.034) 
0:01:46.649 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:205 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.034) 0:01:46.684 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:214 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.032) 0:01:46.717 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.044) 0:01:46.761 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.044) 0:01:46.805 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.038) 0:01:46.844 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.034) 0:01:46.878 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.032) 0:01:46.910 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.028) 0:01:46.938 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.029) 0:01:46.967 ******** skipping: [managed-node2] => (item=RedHat.yml) => { "__vars_file": "RedHat.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "__vars_file": "CentOS.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.podman : Run systemctl] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:52 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.059) 0:01:47.026 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Require installed systemd] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:60 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.028) 0:01:47.055 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:65 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.027) 0:01:47.083 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 07 March 2026 11:45:01 -0500 (0:00:00.029) 0:01:47.112 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 07 March 2026 11:45:02 -0500 (0:00:01.114) 0:01:48.227 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 07 March 2026 11:45:02 -0500 (0:00:00.024) 0:01:48.252 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 07 March 2026 11:45:02 -0500 (0:00:00.038) 0:01:48.290 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 07 March 2026 11:45:02 -0500 (0:00:00.029) 0:01:48.320 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 07 March 2026 11:45:02 -0500 (0:00:00.025) 0:01:48.345 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 07 March 2026 11:45:02 -0500 (0:00:00.028) 0:01:48.374 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.021847", "end": "2026-03-07 11:45:03.186666", "rc": 0, "start": "2026-03-07 11:45:03.164819" } STDOUT: podman version 5.8.0 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.385) 0:01:48.760 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.8.0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.030) 0:01:48.790 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.033) 0:01:48.823 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 
Saturday 07 March 2026 11:45:03 -0500 (0:00:00.028) 0:01:48.852 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.022) 0:01:48.874 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.038) 0:01:48.913 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.027) 0:01:48.940 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.045) 0:01:48.986 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_handle_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.032) 0:01:49.019 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.030) 0:01:49.049 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:45:03 -0500 (0:00:00.046) 0:01:49.096 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 
1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.423) 0:01:49.519 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.051) 0:01:49.571 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.043) 0:01:49.614 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.042) 0:01:49.656 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.095) 0:01:49.751 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.029) 0:01:49.781 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.034) 0:01:49.815 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.031) 0:01:49.847 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.025) 0:01:49.873 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.065) 0:01:49.939 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.042) 0:01:49.981 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.024) 0:01:50.006 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.023) 0:01:50.029 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.044) 0:01:50.074 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 
0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.026) 0:01:50.100 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.024) 0:01:50.125 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.045) 0:01:50.171 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.025) 0:01:50.196 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.024) 0:01:50.220 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.059) 0:01:50.280 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.031) 0:01:50.312 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.024) 0:01:50.337 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.024) 0:01:50.361 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Saturday 07 March 2026 11:45:04 -0500 (0:00:00.025) 0:01:50.386 ******** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.139) 0:01:50.525 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:2 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.040) 0:01:50.566 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:10 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.037) 0:01:50.604 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_ostree is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:15 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.039) 0:01:50.643 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_ostree is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:22 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.035) 0:01:50.678 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:27 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.043) 0:01:50.722 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:31 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.047) 0:01:50.770 ******** skipping: 
[managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.106) 0:01:50.877 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:5 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.073) 0:01:50.950 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:12 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.032) 0:01:50.983 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:17 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.030) 0:01:51.013 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 07 March 2026 11:45:05 -0500 (0:00:00.030) 0:01:51.044 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 07 March 2026 11:45:06 -0500 (0:00:00.804) 0:01:51.848 ******** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:39 Saturday 07 March 2026 11:45:06 -0500 (0:00:00.025) 0:01:51.874 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:44 Saturday 07 March 2026 11:45:06 -0500 (0:00:00.025) 0:01:51.899 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:8 Saturday 07 March 2026 11:45:06 -0500 (0:00:00.022) 0:01:51.921 ******** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:17 Saturday 07 March 2026 11:45:06 -0500 (0:00:00.030) 0:01:51.952 ******** skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": 
"item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:27 Saturday 07 March 2026 11:45:06 -0500 (0:00:00.037) 0:01:51.989 ******** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2026-03-07 11:44:04 EST", "ActiveEnterTimestampMonotonic": "560506770", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket dbus-broker.service basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2026-03-07 11:44:03 EST", "AssertTimestampMonotonic": "559785386", "Before": "network-pre.target shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "488286000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2026-03-07 11:44:03 EST", "ConditionTimestampMonotonic": "559785383", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service iptables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4787", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainHandoffTimestampMonotonic": "559816912", "ExecMainPID": 
"14191", "ExecMainStartTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainStartTimestampMonotonic": "559787917", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2026-03-07 11:44:03 EST", "InactiveExitTimestampMonotonic": "559788901", "InvocationID": "36851f8a440d4a018e15c301930b223c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "14191", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3045257216", "MemoryCurrent": 
"34066432", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34332672", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket dbus-broker.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2026-03-07 11:44:04 EST", "StateChangeTimestampMonotonic": "560506770", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", 
"TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:33 Saturday 07 March 2026 11:45:07 -0500 (0:00:00.584) 0:01:52.574 ******** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2026-03-07 11:44:04 EST", "ActiveEnterTimestampMonotonic": "560506770", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket dbus-broker.service basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2026-03-07 11:44:03 EST", "AssertTimestampMonotonic": "559785386", "Before": "network-pre.target shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "488286000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2026-03-07 11:44:03 EST", "ConditionTimestampMonotonic": "559785383", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service iptables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4787", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", 
"EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainHandoffTimestampMonotonic": "559816912", "ExecMainPID": "14191", "ExecMainStartTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainStartTimestampMonotonic": "559787917", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2026-03-07 11:44:03 EST", "InactiveExitTimestampMonotonic": "559788901", "InvocationID": "36851f8a440d4a018e15c301930b223c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", 
"MainPID": "14191", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3052097536", "MemoryCurrent": "34066432", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34332672", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket dbus-broker.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2026-03-07 11:44:04 EST", "StateChangeTimestampMonotonic": "560506770", "StateDirectoryMode": "0755", "StatusErrno": "0", 
"StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:39 Saturday 07 March 2026 11:45:07 -0500 (0:00:00.574) 0:01:53.148 ******** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:48 Saturday 07 March 2026 11:45:07 -0500 (0:00:00.048) 0:01:53.197 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:61 Saturday 07 March 2026 11:45:07 -0500 (0:00:00.026) 0:01:53.224 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:77 Saturday 07 March 2026 11:45:07 -0500 (0:00:00.021) 0:01:53.246 ******** ok: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.570) 0:01:53.817 ******** skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:141 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.049) 0:01:53.866 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:150 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.035) 0:01:53.902 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:156 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.025) 0:01:53.928 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:165 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.024) 0:01:53.952 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:176 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.023) 0:01:53.975 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:182 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.021) 0:01:53.996 ******** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.031) 0:01:54.027 ******** redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean included: fedora.linux_system_roles.selinux for managed-node2 TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.130) 0:01:54.158 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.040) 0:01:54.199 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.037) 0:01:54.236 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.049) 0:01:54.285 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.029) 0:01:54.315 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.030) 0:01:54.345 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.026) 0:01:54.371 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.025) 0:01:54.397 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['python_version'] is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Saturday 07 March 2026 11:45:08 -0500 (0:00:00.049) 0:01:54.446 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: python3-libselinux python3-policycoreutils TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task 
path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Saturday 07 March 2026 11:45:09 -0500 (0:00:00.823) 0:01:55.269 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['os_family'] == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure grubby used to modify selinux kernel parameter] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Saturday 07 March 2026 11:45:09 -0500 (0:00:00.069) 0:01:55.338 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: grubby TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:67 Saturday 07 March 2026 11:45:10 -0500 (0:00:00.807) 0:01:56.145 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: policycoreutils-python-utils TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:81 Saturday 07 March 2026 11:45:11 -0500 (0:00:00.790) 0:01:56.936 ******** skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:86 Saturday 07 March 2026 11:45:11 -0500 (0:00:00.071) 0:01:57.008 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:91 Saturday 07 March 2026 11:45:11 -0500 (0:00:00.024) 0:01:57.033 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:98 Saturday 07 March 2026 11:45:11 -0500 (0:00:00.025) 0:01:57.058 ******** ok: [managed-node2] TASK [fedora.linux_system_roles.selinux : Run systemctl] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:8 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.870) 0:01:57.928 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Require installed systemd] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:15 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.029) 0:01:57.958 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:20 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.027) 0:01:57.985 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:29 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.026) 0:01:58.012 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(not selinux_state is none and selinux_state | length > 0) or (not selinux_policy is none and selinux_policy | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:40 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.039) 0:01:58.052 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:52 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.028) 0:01:58.080 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [Add or remove selinux=0 from args as needed] ***************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:56 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.033) 0:01:58.114 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __update_kernel_param", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:70 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.040) 0:01:58.154 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:77 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.032) 0:01:58.187 ******** skipping: [managed-node2] => { "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:82 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.039) 0:01:58.227 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:90 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.026) 0:01:58.254 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:95 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.027) 0:01:58.281 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:100 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.022) 0:01:58.304 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:105 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.029) 0:01:58.333 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:110 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.025) 0:01:58.359 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:121 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.020) 0:01:58.380 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:134 Saturday 07 March 2026 11:45:12 -0500 (0:00:00.022) 0:01:58.402 ******** ok: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:146 Saturday 07 March 2026 11:45:13 -0500 (0:00:00.579) 0:01:58.981 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 Saturday 07 March 2026 11:45:13 -0500 (0:00:00.027) 0:01:59.009 ******** ok: [managed-node2] => { "ansible_facts": { 
"selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": 
"sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": 
{ "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { 
"checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { 
"checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { 
"checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { 
"checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": 
"sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { "200": { "checksum": "sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { 
"100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { 
"400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": 
"sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, 
"stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { 
"100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", 
"enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Set SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:164 Saturday 07 March 2026 11:45:16 -0500 (0:00:02.747) 0:02:01.757 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": 
"sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { 
"checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": 
{ "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { 
"checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { 
"checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { 
"checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { 
"checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { "200": { "checksum": "sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { 
"checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { 
"checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": 
"sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": 
"sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { 
"checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } 
}, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:170 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.148) 0:02:01.906 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:183 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.084) 0:02:01.990 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:191 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.035) 0:02:02.026 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.049) 0:02:02.075 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.039) 0:02:02.115 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.031) 0:02:02.147 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.030) 0:02:02.177 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.032) 0:02:02.210 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.131) 0:02:02.342 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "absent" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: bogus\nspec:\n containers:\n - name: bogus\n image: quay.io/libpod/testimage:20210610\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Saturday 07 March 2026 11:45:16 -0500 (0:00:00.059) 0:02:02.401 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "bogus" }, "spec": { "containers": [ { "image": "quay.io/libpod/testimage:20210610", "name": "bogus" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 
Saturday 07 March 2026 11:45:16 -0500 (0:00:00.066) 0:02:02.468 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_name": "bogus", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:42 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.061) 0:02:02.530 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.066) 0:02:02.597 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_handle_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.035) 0:02:02.632 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.043) 0:02:02.676 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.067) 0:02:02.743 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.394) 0:02:03.137 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.041) 0:02:03.178 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.043) 0:02:03.222 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.041) 0:02:03.263 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.043) 0:02:03.307 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.038) 0:02:03.345 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.040) 0:02:03.386 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:45:17 -0500 (0:00:00.107) 0:02:03.493 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:48 Saturday 07 March 2026 11:45:18 -0500 (0:00:00.041) 0:02:03.535 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube is none or __podman_kube | length == 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:57 Saturday 07 March 2026 11:45:18 -0500 (0:00:00.047) 0:02:03.582 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_systemd_scope": "system", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Saturday 07 March 2026 11:45:18 -0500 (0:00:00.059) 0:02:03.642 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:70 Saturday 07 March 2026 11:45:18 -0500 (0:00:00.054) 0:02:03.697 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/bogus.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:74 Saturday 07 March 2026 11:45:18 -0500 (0:00:00.067) 0:02:03.764 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:81 Saturday 07 March 2026 11:45:18 -0500 (0:00:00.066) 0:02:03.831 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/etc/containers/ansible-kubernetes.d/bogus.yml" ], "delta": "0:00:00.005721", "end": "2026-03-07 11:45:18.649830", "rc": 0, "start": "2026-03-07 11:45:18.644109" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:89 Saturday 07 March 2026 11:45:18 -0500 (0:00:00.387) 0:02:04.219 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:2 Saturday 07 March 2026 11:45:18 -0500 (0:00:00.051) 0:02:04.270 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:10 Saturday 07 March 2026 11:45:18 -0500 (0:00:00.021) 0:02:04.291 ******** ok: [managed-node2] => { "changed": false, "enabled": false, "failed_when_result": false, "name": 
"podman-kube@-etc-containers-ansible\\x2dkubernetes.d-bogus.yml.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target \"system-podman\\\\x2dkube.slice\" -.mount systemd-journald.socket network-online.target sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "\"man:podman-kube-play(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "Environment": "\"PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-bogus.yml.service\"", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/bogus.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/bogus.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/bogus.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/bogus.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 
}", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-bogus.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3043594240", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-bogus.yml.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": 
"no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "\"system-podman\\\\x2dkube.slice\" -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 10s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Check if kube file exists] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:30 Saturday 07 March 2026 11:45:19 -0500 (0:00:00.550) 0:02:04.842 ******** ok: [managed-node2] => { "changed": false, "stat": 
{ "atime": 1772901880.340504, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f8266a972ed3be7e204d2a67883fe3a22b8dbf18", "ctime": 1772901879.864642, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 310378703, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1772901879.5975025, "nlink": 1, "path": "/etc/containers/ansible-kubernetes.d/bogus.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 237, "uid": 0, "version": "3786036565", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Remove pod/containers] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:40 Saturday 07 March 2026 11:45:19 -0500 (0:00:00.374) 0:02:05.216 ******** changed: [managed-node2] => { "actions": [ "/usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/bogus.yml" ], "changed": true, "failed_when_result": false } STDOUT: Pods stopped: a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 Pods removed: a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 Secrets removed: Volumes removed: TASK [fedora.linux_system_roles.podman : Remove kubernetes yaml file] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:59 Saturday 07 March 2026 11:45:20 -0500 (0:00:00.522) 0:02:05.739 ******** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/ansible-kubernetes.d/bogus.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:64 Saturday 07 March 2026 11:45:20 -0500 (0:00:00.419) 0:02:06.158 ******** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "-f" ], "delta": "0:00:00.030174", "end": "2026-03-07 11:45:20.991086", "rc": 0, "start": "2026-03-07 11:45:20.960912" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:75 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.413) 0:02:06.572 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:13 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.061) 0:02:06.633 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.042) 0:02:06.675 ******** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.039) 0:02:06.715 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:93 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.039) 0:02:06.755 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.038) 0:02:06.794 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.033) 0:02:06.828 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:205 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.036) 0:02:06.864 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:214 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.035) 0:02:06.899 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Run role] **************************************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:291 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.146) 0:02:07.046 ******** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.114) 0:02:07.161 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.061) 0:02:07.222 ******** 
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.054) 0:02:07.277 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.046) 0:02:07.323 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.040) 0:02:07.363 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.045) 0:02:07.408 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 07 March 2026 11:45:21 -0500 (0:00:00.044) 0:02:07.453 ******** skipping: [managed-node2] => (item=RedHat.yml) => { "__vars_file": "RedHat.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "__vars_file": "CentOS.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "__vars_file": "CentOS_10.yml", "ansible_loop_var": "__vars_file", "changed": false, "false_condition": "__vars_file is file", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.podman : Run systemctl] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:52 Saturday 07 March 2026 11:45:22 -0500 (0:00:00.103) 0:02:07.556 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Require installed systemd] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:60 Saturday 07 March 2026 11:45:22 -0500 (0:00:00.042) 0:02:07.599 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:65 Saturday 07 March 2026 11:45:22 -0500 (0:00:00.046) 0:02:07.646 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 07 March 2026 11:45:22 -0500 (0:00:00.042) 0:02:07.688 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 07 March 2026 11:45:23 -0500 (0:00:01.052) 0:02:08.740 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 07 March 2026 11:45:23 -0500 (0:00:00.037) 0:02:08.778 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 07 March 2026 11:45:23 -0500 (0:00:00.054) 0:02:08.832 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 07 March 2026 11:45:23 -0500 (0:00:00.036) 0:02:08.869 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 07 March 2026 11:45:23 -0500 (0:00:00.038) 0:02:08.907 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 07 March 2026 11:45:23 -0500 (0:00:00.036) 0:02:08.944 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.021270", "end": "2026-03-07 11:45:23.768745", "rc": 0, "start": "2026-03-07 11:45:23.747475" } STDOUT: podman version 5.8.0 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 07 March 2026 11:45:23 -0500 (0:00:00.405) 0:02:09.349 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.8.0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 07 March 2026 11:45:23 -0500 (0:00:00.119) 0:02:09.468 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.044) 0:02:09.513 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.041) 0:02:09.554 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.031) 0:02:09.586 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.055) 0:02:09.641 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.047) 0:02:09.688 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.076) 0:02:09.765 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_handle_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.052) 0:02:09.817 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.053) 0:02:09.870 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.061) 0:02:09.932 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.418) 0:02:10.350 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.044) 0:02:10.394 ******** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.043) 0:02:10.438 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:45:24 -0500 (0:00:00.043) 0:02:10.482 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.042) 0:02:10.524 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.044) 0:02:10.568 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.042) 0:02:10.610 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.039) 0:02:10.650 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.042) 0:02:10.693 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.109) 0:02:10.803 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.070) 0:02:10.873 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.042) 0:02:10.915 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.108) 0:02:11.024 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.093) 0:02:11.117 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.035) 0:02:11.152 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.042) 0:02:11.195 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.073) 0:02:11.269 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Saturday 07 March 2026 11:45:25 -0500 
(0:00:00.037) 0:02:11.307 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.039) 0:02:11.346 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.073) 0:02:11.420 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.040) 0:02:11.460 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Saturday 07 March 2026 11:45:25 -0500 (0:00:00.037) 0:02:11.497 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.035) 0:02:11.532 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.033) 0:02:11.566 ******** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.140) 0:02:11.706 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:2 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.063) 0:02:11.769 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if 
system is ostree] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:10 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.056) 0:02:11.826 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_ostree is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:15 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.042) 0:02:11.868 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_ostree is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:22 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.045) 0:02:11.914 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:27 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.028) 0:02:11.943 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:31 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.028) 0:02:11.971 ******** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.199) 0:02:12.170 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:5 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.042) 0:02:12.213 ******** 
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:12 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.026) 0:02:12.240 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:17 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.031) 0:02:12.271 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 07 March 2026 11:45:26 -0500 (0:00:00.039) 0:02:12.310 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 07 March 2026 11:45:27 -0500 (0:00:00.837) 0:02:13.148 ******** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:39 Saturday 07 March 2026 11:45:27 -0500 (0:00:00.041) 0:02:13.190 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:44 Saturday 07 March 2026 11:45:27 -0500 (0:00:00.023) 0:02:13.213 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:8 Saturday 07 March 2026 11:45:27 -0500 (0:00:00.024) 0:02:13.238 ******** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | 
bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:17 Saturday 07 March 2026 11:45:27 -0500 (0:00:00.033) 0:02:13.272 ******** skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:27 Saturday 07 March 2026 11:45:27 -0500 (0:00:00.034) 0:02:13.306 ******** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2026-03-07 11:44:04 EST", "ActiveEnterTimestampMonotonic": "560506770", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket dbus-broker.service basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2026-03-07 11:44:03 EST", "AssertTimestampMonotonic": "559785386", "Before": "network-pre.target shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", 
"CPUUsageNSec": "518335000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2026-03-07 11:44:03 EST", "ConditionTimestampMonotonic": "559785383", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service iptables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4787", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainHandoffTimestampMonotonic": "559816912", "ExecMainPID": "14191", "ExecMainStartTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainStartTimestampMonotonic": "559787917", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", 
"IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2026-03-07 11:44:03 EST", "InactiveExitTimestampMonotonic": "559788901", "InvocationID": "36851f8a440d4a018e15c301930b223c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "14191", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3067838464", "MemoryCurrent": "34086912", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34353152", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": 
"success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket dbus-broker.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2026-03-07 11:44:04 EST", "StateChangeTimestampMonotonic": "560506770", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:33 Saturday 07 March 2026 11:45:28 -0500 (0:00:00.588) 0:02:13.895 ******** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2026-03-07 11:44:04 EST", "ActiveEnterTimestampMonotonic": "560506770", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket dbus-broker.service basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2026-03-07 11:44:03 EST", "AssertTimestampMonotonic": "559785386", "Before": "network-pre.target shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", 
"BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "518335000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2026-03-07 11:44:03 EST", "ConditionTimestampMonotonic": "559785383", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service iptables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4787", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainHandoffTimestampMonotonic": "559816912", "ExecMainPID": "14191", "ExecMainStartTimestamp": "Sat 2026-03-07 11:44:03 EST", "ExecMainStartTimestampMonotonic": "559787917", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2026-03-07 11:44:03 EST] ; stop_time=[n/a] ; pid=14191 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", 
"GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2026-03-07 11:44:03 EST", "InactiveExitTimestampMonotonic": "559788901", "InvocationID": "36851f8a440d4a018e15c301930b223c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "14191", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3069054976", "MemoryCurrent": "34086912", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34353152", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": 
"yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket dbus-broker.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2026-03-07 11:44:04 EST", "StateChangeTimestampMonotonic": "560506770", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:39 Saturday 07 March 2026 11:45:28 -0500 (0:00:00.566) 0:02:14.461 ******** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:48 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.063) 0:02:14.524 ******** 
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:61 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.036) 0:02:14.561 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:77 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.037) 0:02:14.599 ******** ok: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.549) 0:02:15.148 ******** skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:141 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.041) 0:02:15.190 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:150 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.034) 0:02:15.225 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:156 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.046) 0:02:15.272 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:165 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.036) 0:02:15.308 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:176 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.036) 0:02:15.345 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:182 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.036) 0:02:15.381 ******** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Saturday 07 March 2026 11:45:29 -0500 (0:00:00.108) 0:02:15.489 ******** redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean included: fedora.linux_system_roles.selinux for managed-node2 TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Saturday 07 March 2026 11:45:30 -0500 (0:00:00.123) 0:02:15.612 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Saturday 07 March 2026 11:45:30 -0500 (0:00:00.032) 0:02:15.645 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Saturday 07 March 2026 11:45:30 -0500 (0:00:00.031) 0:02:15.676 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Saturday 07 March 2026 11:45:30 -0500 (0:00:00.042) 0:02:15.719 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Saturday 07 March 2026 11:45:30 -0500 (0:00:00.028) 0:02:15.748 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Saturday 07 March 2026 11:45:30 -0500 (0:00:00.026) 0:02:15.775 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Saturday 07 March 2026 11:45:30 -0500 (0:00:00.027) 0:02:15.802 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Saturday 07 March 2026 11:45:30 -0500 (0:00:00.026) 0:02:15.829 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['python_version'] is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Saturday 07 March 2026 11:45:30 -0500 (0:00:00.032) 0:02:15.861 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: python3-libselinux python3-policycoreutils TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Saturday 07 March 2026 11:45:31 -0500 (0:00:00.810) 0:02:16.672 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['os_family'] == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure grubby used to modify selinux kernel parameter] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Saturday 07 March 2026 11:45:31 -0500 (0:00:00.051) 0:02:16.723 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: grubby TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:67 Saturday 07 March 2026 11:45:32 -0500 (0:00:00.814) 0:02:17.537 ******** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: policycoreutils-python-utils TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:81 Saturday 07 March 2026 11:45:32 -0500 (0:00:00.847) 0:02:18.385 ******** skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:86 
Saturday 07 March 2026 11:45:32 -0500 (0:00:00.042) 0:02:18.428 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:91 Saturday 07 March 2026 11:45:32 -0500 (0:00:00.040) 0:02:18.468 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:98 Saturday 07 March 2026 11:45:32 -0500 (0:00:00.038) 0:02:18.507 ******** ok: [managed-node2] TASK [fedora.linux_system_roles.selinux : Run systemctl] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:8 Saturday 07 March 2026 11:45:33 -0500 (0:00:00.900) 0:02:19.407 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Require installed systemd] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:15 Saturday 07 March 2026 11:45:33 -0500 (0:00:00.026) 0:02:19.433 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:20 Saturday 07 March 2026 11:45:33 -0500 (0:00:00.027) 0:02:19.461 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_booted is not defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:29 Saturday 07 March 2026 11:45:33 -0500 (0:00:00.026) 0:02:19.487 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(not selinux_state is none and selinux_state | length > 0) or (not selinux_policy is none and selinux_policy | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:40 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.034) 0:02:19.522 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:52 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.152) 0:02:19.675 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false 
}, "changed": false } TASK [Add or remove selinux=0 from args as needed] ***************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:56 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.051) 0:02:19.727 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __update_kernel_param", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:70 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.040) 0:02:19.768 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:77 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.027) 0:02:19.795 ******** skipping: [managed-node2] => { "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:82 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.034) 0:02:19.830 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:90 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.024) 0:02:19.855 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:95 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.024) 0:02:19.880 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:100 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.022) 0:02:19.902 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:105 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.022) 0:02:19.925 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:110 Saturday 07 March 2026 11:45:34 -0500 
(0:00:00.024) 0:02:19.949 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:121 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.020) 0:02:19.970 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:134 Saturday 07 March 2026 11:45:34 -0500 (0:00:00.021) 0:02:19.991 ******** ok: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:146 Saturday 07 March 2026 11:45:35 -0500 (0:00:00.622) 0:02:20.613 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 Saturday 07 March 2026 11:45:35 -0500 (0:00:00.036) 0:02:20.650 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, 
"automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", 
"enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", 
"enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", 
"enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", 
"enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", 
"enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", 
"enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { "200": { "checksum": 
"sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": 
"sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": 
"sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": 
"sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { 
"checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": 
{ "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Set SELinux modules facts] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:164 Saturday 07 March 2026 11:45:37 -0500 (0:00:02.803) 0:02:23.453 ******** ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": 
"sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { 
"checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:97391dbc81358c09228185edb79cadecb15bf8641fe8b6f3cf9ef970d79644ec", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, 
"cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "extra_varrun": { "400": { "checksum": "sha256:6c694e4be5a9d1895e17048eace0eb110c69a81ab1d1e01d59c2a075e08a4f42", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { 
"100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { 
"checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { 
"checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { 
"100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": 
"sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "passt": { "200": { "checksum": "sha256:d778011449f026622cc05ab496a39b6aa55a7e6447621a5ff7afc242b155b0e2", "enabled": 1 } }, "passt-repair": { "200": { "checksum": "sha256:7db523cb1e14c32587544907a28237c09c418307c349a9c6c5a0095c9ef22533", "enabled": 1 } }, "pasta": { "200": { "checksum": "sha256:cbdee1f9990db7defe1393b55569dcf01a84786f38a49e923b023c7c87bc2571", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": 
{ "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { 
"checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": 
"sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { 
"checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { 
"100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, 
"zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:170 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.178) 0:02:23.632 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:183 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.046) 0:02:23.678 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:191 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.026) 0:02:23.704 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.034) 0:02:23.739 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.029) 0:02:23.768 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.021) 0:02:23.789 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.024) 0:02:23.814 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.022) 0:02:23.837 ******** included: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.166) 0:02:24.003 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "debug": true, "log_level": "debug", "state": "started" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: httpd1\nspec:\n containers:\n - command:\n - /bin/busybox-extras\n - httpd\n - -f\n - -p\n - 80\n image: quay.io/libpod/testimage:20210610\n name: httpd1\n ports:\n - containerPort: 80\n hostPort: 15001\n volumeMounts:\n - mountPath: /var/www:Z\n name: www\n - mountPath: /var/httpd-create:Z\n name: create\n workingDir: /var/www\n volumes:\n - hostPath:\n path: /tmp/lsr_od4netlk_podman/httpd1\n name: www\n - hostPath:\n path: /tmp/lsr_od4netlk_podman/httpd1-create\n name: create\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.042) 0:02:24.046 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "httpd1" }, "spec": { "containers": [ { "command": [ "/bin/busybox-extras", "httpd", "-f", "-p", 80 ], "image": "quay.io/libpod/testimage:20210610", "name": "httpd1", "ports": [ { "containerPort": 80, "hostPort": 15001 } ], "volumeMounts": [ { "mountPath": "/var/www:Z", "name": "www" }, { "mountPath": "/var/httpd-create:Z", "name": "create" } ], "workingDir": "/var/www" } ], "volumes": [ { "hostPath": { "path": "/tmp/lsr_od4netlk_podman/httpd1" }, "name": "www" }, { "hostPath": { "path": "/tmp/lsr_od4netlk_podman/httpd1-create" }, "name": "create" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "podman_basic_user" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.060) 0:02:24.106 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_name": "httpd1", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:42 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.046) 0:02:24.153 ******** included: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:45:38 -0500 (0:00:00.062) 0:02:24.215 ******** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "podman_basic_user": [ "x", "3001", "3001", "", "/home/podman_basic_user", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:45:39 -0500 (0:00:00.427) 0:02:24.643 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:45:39 -0500 (0:00:00.039) 0:02:24.682 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "3001" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:45:39 -0500 (0:00:00.051) 0:02:24.734 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:45:39 -0500 (0:00:00.389) 0:02:25.123 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "podman_basic_user" ], "delta": "0:00:00.003602", "end": "2026-03-07 11:45:39.956903", "rc": 0, "start": "2026-03-07 11:45:39.953301" } STDOUT: 0: podman_basic_user 524288 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.400) 0:02:25.524 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "podman_basic_user" ], "delta": "0:00:00.005239", "end": "2026-03-07 11:45:40.334619", "rc": 0, "start": "2026-03-07 
11:45:40.329380" } STDOUT: 0: podman_basic_user 524288 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.394) 0:02:25.918 ******** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "podman_basic_user": { "range": 65536, "start": 524288 } }, "podman_subuid_info": { "podman_basic_user": { "range": 65536, "start": 524288 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.069) 0:02:25.987 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.041) 0:02:26.029 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.041) 0:02:26.071 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.039) 0:02:26.110 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.037) 0:02:26.148 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:48 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.029) 0:02:26.177 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube is none or __podman_kube | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:57 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.033) 0:02:26.211 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, 
"__podman_systemd_scope": "user", "__podman_xdg_runtime_dir": "/run/user/3001" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.046) 0:02:26.257 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:70 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.037) 0:02:26.295 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:74 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.030) 0:02:26.325 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:81 Saturday 07 March 2026 11:45:40 -0500 (0:00:00.030) 0:02:26.356 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml" ], "delta": "0:00:00.005273", "end": "2026-03-07 11:45:41.163376", "rc": 0, "start": "2026-03-07 11:45:41.158103" } STDOUT: podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:89 Saturday 07 March 2026 11:45:41 -0500 (0:00:00.378) 0:02:26.734 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:93 Saturday 07 March 2026 11:45:41 -0500 (0:00:00.080) 0:02:26.814 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Saturday 07 March 2026 11:45:41 -0500 (0:00:00.068) 0:02:26.883 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:13 Saturday 07 
March 2026 11:45:41 -0500 (0:00:00.045) 0:02:26.929 ******** changed: [managed-node2] => { "changed": true, "cmd": [ "loginctl", "enable-linger", "podman_basic_user" ], "delta": "0:00:00.020398", "end": "2026-03-07 11:45:41.748818", "rc": 0, "start": "2026-03-07 11:45:41.728420" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 07 March 2026 11:45:41 -0500 (0:00:00.424) 0:02:27.353 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 07 March 2026 11:45:41 -0500 (0:00:00.036) 0:02:27.390 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Saturday 07 March 2026 11:45:41 -0500 (0:00:00.026) 0:02:27.416 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_volumes": [ "/tmp/lsr_od4netlk_podman/httpd1", "/tmp/lsr_od4netlk_podman/httpd1-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Saturday 07 March 2026 11:45:41 -0500 (0:00:00.042) 0:02:27.459 ******** changed: [managed-node2] => (item=/tmp/lsr_od4netlk_podman/httpd1) => { "ansible_loop_var": "item", "changed": true, "gid": 3001, "group": "podman_basic_user", "item": "/tmp/lsr_od4netlk_podman/httpd1", "mode": "0755", "owner": "podman_basic_user", "path": "/tmp/lsr_od4netlk_podman/httpd1", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 23, "state": "directory", "uid": 3001 } changed: [managed-node2] => (item=/tmp/lsr_od4netlk_podman/httpd1-create) => { "ansible_loop_var": "item", "changed": true, "gid": 3001, "group": "podman_basic_user", "item": "/tmp/lsr_od4netlk_podman/httpd1-create", "mode": "0755", "owner": "podman_basic_user", "path": "/tmp/lsr_od4netlk_podman/httpd1-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 3001 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Saturday 07 March 2026 11:45:42 -0500 (0:00:00.766) 0:02:28.226 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:2 Saturday 07 March 2026 11:45:42 -0500 (0:00:00.044) 0:02:28.271 ******** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": 
"the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle images when not booted] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:25 Saturday 07 March 2026 11:45:44 -0500 (0:00:01.783) 0:02:30.054 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_booted", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:32 Saturday 07 March 2026 11:45:44 -0500 (0:00:00.040) 0:02:30.094 ******** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:40 Saturday 07 March 2026 11:45:44 -0500 (0:00:00.389) 0:02:30.484 ******** changed: [managed-node2] => { "changed": true, "gid": 3001, "group": "podman_basic_user", "mode": "0755", "owner": "podman_basic_user", "path": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 6, "state": "directory", "uid": 3001 } TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:51 Saturday 07 March 2026 11:45:45 -0500 (0:00:00.424) 0:02:30.908 ******** changed: [managed-node2] => { "changed": true, "checksum": "5a374c59230176d446e6cd38bcc64da326c45092", "dest": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml", "gid": 3001, "group": "podman_basic_user", "md5sum": "06dc822c76328ef3b7c90c68335f43b8", "mode": "0644", "owner": "podman_basic_user", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 762, "src": "/root/.ansible/tmp/ansible-tmp-1772901945.4660614-14184-34473031875573/.source.yml", "state": "file", "uid": 3001 } TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:63 Saturday 07 March 2026 11:45:46 -0500 (0:00:00.730) 0:02:31.638 ******** changed: [managed-node2] => { "actions": [ "/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml" ], "changed": true } STDOUT: Pod: 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399 Container: ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 STDERR: time="2026-03-07T11:45:46-05:00" level=info msg="/bin/podman filtering at log level debug" time="2026-03-07T11:45:46-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2026-03-07T11:45:46-05:00" level=info msg="Setting parallel job count to 7" time="2026-03-07T11:45:46-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2026-03-07T11:45:46-05:00" level=info msg="Using sqlite as database backend" time="2026-03-07T11:45:46-05:00" level=debug 
msg="systemd-logind: Unknown object '/'." time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph driver overlay" time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2026-03-07T11:45:46-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2026-03-07T11:45:46-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2026-03-07T11:45:46-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2026-03-07T11:45:46-05:00" level=debug msg="Using transient store: false" time="2026-03-07T11:45:46-05:00" level=debug msg="Not configuring container store" time="2026-03-07T11:45:46-05:00" level=debug msg="Initializing event backend file" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2026-03-07T11:45:46-05:00" level=info msg="Creating a new rootless user namespace" time="2026-03-07T11:45:46-05:00" level=info msg="/bin/podman filtering at log level debug" time="2026-03-07T11:45:46-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2026-03-07T11:45:46-05:00" level=info msg="Setting parallel job count to 7" time="2026-03-07T11:45:46-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2026-03-07T11:45:46-05:00" level=info msg="Using sqlite as database backend" time="2026-03-07T11:45:46-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph driver overlay" time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2026-03-07T11:45:46-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2026-03-07T11:45:46-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2026-03-07T11:45:46-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2026-03-07T11:45:46-05:00" level=debug msg="Using transient store: false" time="2026-03-07T11:45:46-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2026-03-07T11:45:46-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2026-03-07T11:45:46-05:00" level=debug msg="Initializing event backend file" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Successfully loaded 1 networks" time="2026-03-07T11:45:46-05:00" level=debug msg="found free device name podman1" time="2026-03-07T11:45:46-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2026-03-07T11:45:46-05:00" level=debug msg="Pod using bridge network mode" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup path 
user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string" time="2026-03-07T11:45:46-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:45:46-05:00" level=debug msg="setting container name 46ab0de2b796-infra" time="2026-03-07T11:45:46-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 196e978a743fccc03fb8ddd2d41a1f9a15d160f55231f9844a9070e6a9ce61ba bridge podman1 2026-03-07 11:45:46.711313386 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2026-03-07T11:45:46-05:00" level=debug msg="Successfully loaded 2 networks" time="2026-03-07T11:45:46-05:00" level=debug msg="Allocated lock 1 for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2026-03-07T11:45:46-05:00" level=debug msg="Check for idmapped mounts support " time="2026-03-07T11:45:46-05:00" level=debug msg="Created container \"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\" has run directory \"/run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:45:46-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2026-03-07T11:45:46-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2026-03-07T11:45:46-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:45:46-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2026-03-07T11:45:46-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:45:46-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2026-03-07T11:45:46-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:45:46-05:00" level=debug msg="adding container to pod httpd1" time="2026-03-07T11:45:46-05:00" level=debug msg="setting container name httpd1-httpd1" time="2026-03-07T11:45:46-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:45:46-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /proc" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /dev" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /dev/pts" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /dev/mqueue" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /sys" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2026-03-07T11:45:46-05:00" level=debug msg="Allocated lock 2 for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Created container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\" has run directory 
\"/run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Strongconnecting node a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="Pushed a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 onto stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Finishing node a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431. Popped a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 off stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Strongconnecting node ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:46-05:00" level=debug msg="Pushed ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 onto stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Finishing node ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431. Popped ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 off stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-dcd66955-fe96-f197-416b-aad9b87d86cb for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="Created root filesystem for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/rootfs/merge" time="2026-03-07T11:45:46-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2026-03-07T11:45:46-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2026-03-07T11:45:46-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::bridge] Using mtu 65520 from default route interface for the network [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/ip_forward to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/route_localnet to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink_route] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_196e978a_10_89_0_0_nm24 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "d6:83:e5:9a:8f:77", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Starting parent driver\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport112898145/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport112898145/.bp.sock]\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=Ready\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport is ready" time="2026-03-07T11:45:46-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:45:46-05:00" level=debug msg="Setting Cgroups for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 to 
user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice:libpod:a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:45:46-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/rootfs/merge\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Created OCI spec for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/config.json" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:45:46-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 -u a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata -p /run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/pidfile -n 46ab0de2b796-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg 
--storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431]" time="2026-03-07T11:45:46-05:00" level=info msg="Running conmon under slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice and unitName libpod-conmon-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2026-03-07T11:45:47-05:00" level=debug msg="Received: 30855" time="2026-03-07T11:45:47-05:00" level=info msg="Got Conmon PID as 30853" time="2026-03-07T11:45:47-05:00" level=debug msg="Created container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 in OCI runtime" time="2026-03-07T11:45:47-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2026-03-07T11:45:47-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2026-03-07T11:45:47-05:00" level=debug msg="Starting container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 with command [/catatonit -P]" time="2026-03-07T11:45:47-05:00" level=debug msg="Started container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:47-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/JH6ZGMTVFTMNRQA4DOPAJEHHJA,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c60,c376\"" time="2026-03-07T11:45:47-05:00" level=debug msg="Mounted container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/merged\"" time="2026-03-07T11:45:47-05:00" level=debug msg="Created root filesystem for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 at /home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/merged" time="2026-03-07T11:45:47-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:45:47-05:00" level=debug msg="Setting Cgroups for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 to user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice:libpod:ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:47-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:45:47-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2026-03-07T11:45:47-05:00" level=debug msg="Created OCI spec for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/config.json" 
time="2026-03-07T11:45:47-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399" time="2026-03-07T11:45:47-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:47-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:47-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:45:47-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 -u ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata -p /run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431]" time="2026-03-07T11:45:47-05:00" level=info msg="Running conmon under slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice and unitName libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2026-03-07T11:45:47-05:00" level=debug msg="Received: 30863" time="2026-03-07T11:45:47-05:00" level=info msg="Got Conmon PID as 30861" time="2026-03-07T11:45:47-05:00" level=debug 
msg="Created container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 in OCI runtime" time="2026-03-07T11:45:47-05:00" level=debug msg="Starting container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 with command [/bin/busybox-extras httpd -f -p 80]" time="2026-03-07T11:45:47-05:00" level=debug msg="Started container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:47-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2026-03-07T11:45:47-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=30780 time="2026-03-07T11:45:47-05:00" level=debug msg="Shutting down engines" TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:72 Saturday 07 March 2026 11:45:47 -0500 (0:00:01.122) 0:02:32.761 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Enable service] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:85 Saturday 07 March 2026 11:45:47 -0500 (0:00:00.646) 0:02:33.407 ******** changed: [managed-node2] => { "changed": true, "enabled": true, "name": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "run-user-3001.mount -.mount network-online.target basic.target \"app-podman\\\\x2dkube.slice\"", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", 
"DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "\"man:podman-kube-play(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "Environment": "\"PODMAN_SYSTEMD_UNIT=podman-kube@-home-podman_basic_user-.config-containers-ansible\\\\x2dkubernetes.d-httpd1.yml.service\"", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/user/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": 
"13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3613913088", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"podman-kube@-home-podman_basic_user-.config-containers-ansible\\\\x2dkubernetes.d-httpd1.yml.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "\"app-podman\\\\x2dkube.slice\" basic.target", "RequiresMountsFor": "/run/user/3001/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": 
"null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 10s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target", "WantsMountsFor": "/home/podman_basic_user", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/podman_basic_user" } } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:98 Saturday 07 March 2026 11:45:48 -0500 (0:00:00.694) 0:02:34.102 ******** changed: [managed-node2] => { "changed": true, "name": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target basic.target \"app-podman\\\\x2dkube.slice\" -.mount run-user-3001.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "default.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", 
"ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "\"man:podman-kube-play(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "Environment": "\"PODMAN_SYSTEMD_UNIT=podman-kube@-home-podman_basic_user-.config-containers-ansible\\\\x2dkubernetes.d-httpd1.yml.service\"", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/user/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", 
"LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3613306880", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"podman-kube@-home-podman_basic_user-.config-containers-ansible\\\\x2dkubernetes.d-httpd1.yml.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target \"app-podman\\\\x2dkube.slice\"", "RequiresMountsFor": "/run/user/3001/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 10s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "default.target", "Wants": "network-online.target", "WantsMountsFor": "/home/podman_basic_user", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/podman_basic_user" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:113 Saturday 07 March 2026 11:45:59 -0500 (0:00:11.296) 0:02:45.399 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Saturday 07 March 2026 11:45:59 -0500 (0:00:00.028) 0:02:45.428 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "debug": true, "log_level": "debug", "state": "started" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: httpd2\nspec:\n containers:\n - command:\n - /bin/busybox-extras\n - httpd\n - -f\n - -p\n - 80\n image: quay.io/libpod/testimage:20210610\n name: httpd2\n ports:\n - containerPort: 80\n hostPort: 15002\n volumeMounts:\n - mountPath: /var/www:Z\n name: www\n - mountPath: /var/httpd-create:Z\n name: create\n workingDir: /var/www\n volumes:\n - hostPath:\n path: /tmp/lsr_od4netlk_podman/httpd2\n name: www\n - hostPath:\n path: /tmp/lsr_od4netlk_podman/httpd2-create\n name: create\n" }, "changed": false } 
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Saturday 07 March 2026 11:45:59 -0500 (0:00:00.046) 0:02:45.474 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "httpd2" }, "spec": { "containers": [ { "command": [ "/bin/busybox-extras", "httpd", "-f", "-p", 80 ], "image": "quay.io/libpod/testimage:20210610", "name": "httpd2", "ports": [ { "containerPort": 80, "hostPort": 15002 } ], "volumeMounts": [ { "mountPath": "/var/www:Z", "name": "www" }, { "mountPath": "/var/httpd-create:Z", "name": "create" } ], "workingDir": "/var/www" } ], "volumes": [ { "hostPath": { "path": "/tmp/lsr_od4netlk_podman/httpd2" }, "name": "www" }, { "hostPath": { "path": "/tmp/lsr_od4netlk_podman/httpd2-create" }, "name": "create" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Saturday 07 March 2026 11:46:00 -0500 (0:00:00.060) 0:02:45.534 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_name": "httpd2", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:42 Saturday 07 March 2026 11:46:00 -0500 (0:00:00.106) 0:02:45.641 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:46:00 -0500 (0:00:00.049) 0:02:45.690 ******** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:46:00 -0500 (0:00:00.382) 0:02:46.073 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:46:00 -0500 (0:00:00.030) 0:02:46.103 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 
Saturday 07 March 2026 11:46:00 -0500 (0:00:00.048) 0:02:46.151 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.376) 0:02:46.528 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.027) 0:02:46.556 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.025) 0:02:46.581 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.024) 0:02:46.605 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.025) 0:02:46.631 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.024) 0:02:46.656 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.025) 0:02:46.681 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.024) 0:02:46.706 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:48 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.024) 0:02:46.730 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube is none or __podman_kube | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:57 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.029) 0:02:46.760 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_systemd_scope": "system", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.038) 0:02:46.798 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:70 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.037) 0:02:46.835 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/httpd2.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:74 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.030) 0:02:46.866 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:81 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.033) 0:02:46.900 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/etc/containers/ansible-kubernetes.d/httpd2.yml" ], "delta": "0:00:00.006211", "end": "2026-03-07 11:46:01.709453", 
"rc": 0, "start": "2026-03-07 11:46:01.703242" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:89 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.380) 0:02:47.281 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:93 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.023) 0:02:47.305 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.056) 0:02:47.362 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:13 Saturday 07 March 2026 11:46:01 -0500 (0:00:00.069) 0:02:47.431 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 07 March 2026 11:46:02 -0500 (0:00:00.121) 0:02:47.552 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 07 March 2026 11:46:02 -0500 (0:00:00.037) 0:02:47.590 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Saturday 07 March 2026 11:46:02 -0500 (0:00:00.037) 0:02:47.628 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_volumes": [ "/tmp/lsr_od4netlk_podman/httpd2", "/tmp/lsr_od4netlk_podman/httpd2-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Saturday 07 March 2026 11:46:02 -0500 (0:00:00.075) 0:02:47.704 ******** ok: [managed-node2] => (item=/tmp/lsr_od4netlk_podman/httpd2) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": 
"/tmp/lsr_od4netlk_podman/httpd2", "mode": "0755", "owner": "root", "path": "/tmp/lsr_od4netlk_podman/httpd2", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 23, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/lsr_od4netlk_podman/httpd2-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/lsr_od4netlk_podman/httpd2-create", "mode": "0755", "owner": "root", "path": "/tmp/lsr_od4netlk_podman/httpd2-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Saturday 07 March 2026 11:46:03 -0500 (0:00:00.810) 0:02:48.514 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:2 Saturday 07 March 2026 11:46:03 -0500 (0:00:00.044) 0:02:48.558 ******** ok: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle images when not booted] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:25 Saturday 07 March 2026 11:46:04 -0500 (0:00:01.044) 0:02:49.603 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_booted", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:32 Saturday 07 March 2026 11:46:04 -0500 (0:00:00.037) 0:02:49.640 ******** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:40 Saturday 07 March 2026 11:46:04 -0500 (0:00:00.397) 0:02:50.038 ******** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:51 Saturday 07 March 2026 11:46:04 -0500 (0:00:00.399) 0:02:50.437 ******** changed: [managed-node2] => { "changed": true, "checksum": "3ff675c4424d0c6a65148416b04367244e5cae81", "dest": "/etc/containers/ansible-kubernetes.d/httpd2.yml", "gid": 0, "group": "root", "md5sum": "15c66a476e3aa408bbc7fa503a01805c", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 762, 
"src": "/root/.ansible/tmp/ansible-tmp-1772901964.9849694-14845-101719091169581/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:63 Saturday 07 March 2026 11:46:05 -0500 (0:00:00.686) 0:02:51.124 ******** changed: [managed-node2] => { "actions": [ "/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml" ], "changed": true } STDOUT: Pod: 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 Container: 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 STDERR: time="2026-03-07T11:46:06-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2026-03-07T11:46:06-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2026-03-07T11:46:06-05:00" level=info msg="Setting parallel job count to 7" time="2026-03-07T11:46:06-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2026-03-07T11:46:06-05:00" level=info msg="Using sqlite as database backend" time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph driver overlay" time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Using run root /run/containers/storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2026-03-07T11:46:06-05:00" level=debug msg="Using tmp dir /run/libpod" time="2026-03-07T11:46:06-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2026-03-07T11:46:06-05:00" level=debug msg="Using transient store: false" time="2026-03-07T11:46:06-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2026-03-07T11:46:06-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2026-03-07T11:46:06-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2026-03-07T11:46:06-05:00" level=debug msg="Initializing event backend journald" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug 
msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d bridge podman1 2026-03-07 11:44:18.828483768 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded 2 networks" time="2026-03-07T11:46:06-05:00" level=debug msg="Pod using bridge network mode" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string" time="2026-03-07T11:46:06-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:46:06-05:00" level=debug msg="setting container name 09b7f33e3afd-infra" time="2026-03-07T11:46:06-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Allocated lock 1 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container \"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\" has work directory \"/var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\" has run directory \"/run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local 
containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:46:06-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2026-03-07T11:46:06-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2026-03-07T11:46:06-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:46:06-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2026-03-07T11:46:06-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:46:06-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2026-03-07T11:46:06-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:46:06-05:00" level=debug msg="adding container to pod httpd2" time="2026-03-07T11:46:06-05:00" level=debug msg="setting container name httpd2-httpd2" time="2026-03-07T11:46:06-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:46:06-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /proc" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /dev" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /dev/pts" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /dev/mqueue" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /sys" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2026-03-07T11:46:06-05:00" level=debug msg="Allocated lock 2 for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\" has work directory \"/var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\" has run directory \"/run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata\"" 
time="2026-03-07T11:46:06-05:00" level=debug msg="Strongconnecting node ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="Pushed ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b onto stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Finishing node ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b. Popped ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b off stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Strongconnecting node 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="Pushed 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 onto stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Finishing node 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08. Popped 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 off stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Made network namespace at /run/netns/netns-57a5144e-40ac-4a85-01ac-9226ddb3e6f8 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="Created root filesystem for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b at /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/rootfs/merge" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... [INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::bridge] Using mtu 9001 from default route interface for the network [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/ip_forward to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/route_localnet to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink_route] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_52ab27bf_10_89_0_0_nm24 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "ee:98:79:da:ba:e0", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup 
complete time="2026-03-07T11:46:06-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:46:06-05:00" level=debug msg="Setting Cgroups for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b to machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice:libpod:ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:46:06-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/rootfs/merge\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Created OCI spec for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b at /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/config.json" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:46:06-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b -u ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata -p /run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/pidfile -n 09b7f33e3afd-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt 
--exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b]" time="2026-03-07T11:46:06-05:00" level=info msg="Running conmon under slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice and unitName libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope" time="2026-03-07T11:46:06-05:00" level=debug msg="Received: 33495" time="2026-03-07T11:46:06-05:00" level=info msg="Got Conmon PID as 33493" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b in OCI runtime" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2026-03-07T11:46:06-05:00" level=debug msg="Starting container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b with command [/catatonit -P]" time="2026-03-07T11:46:06-05:00" level=debug msg="Started container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/MVNQJ7CO6BHDSUCUPVG3N5YCHU,upperdir=/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/diff,workdir=/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c91,c172\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Mounted container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\" at \"/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/merged\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Created root filesystem for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 at /var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/merged" time="2026-03-07T11:46:06-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:46:06-05:00" level=debug msg="Setting Cgroups for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 to machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice:libpod:9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:46:06-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2026-03-07T11:46:06-05:00" level=debug msg="Created OCI spec for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 at /var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/config.json" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name 
libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:46:06-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 -u 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata -p /run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08]" time="2026-03-07T11:46:06-05:00" level=info msg="Running conmon under slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice and unitName libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope" time="2026-03-07T11:46:06-05:00" level=debug msg="Received: 33503" time="2026-03-07T11:46:06-05:00" level=info msg="Got Conmon PID as 33500" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 in OCI runtime" time="2026-03-07T11:46:06-05:00" level=debug msg="Starting container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 with command [/bin/busybox-extras httpd -f -p 80]" time="2026-03-07T11:46:06-05:00" level=debug msg="Started container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="Called 
kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2026-03-07T11:46:06-05:00" level=debug msg="Shutting down engines" time="2026-03-07T11:46:06-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=33436 TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:72 Saturday 07 March 2026 11:46:06 -0500 (0:00:00.847) 0:02:51.972 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Enable service] *********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:85 Saturday 07 March 2026 11:46:07 -0500 (0:00:00.779) 0:02:52.751 ******** changed: [managed-node2] => { "changed": true, "enabled": true, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target systemd-journald.socket basic.target -.mount sysinit.target \"system-podman\\\\x2dkube.slice\"", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "\"man:podman-kube-play(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "Environment": "\"PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-httpd2.yml.service\"", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", 
"ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd2.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd2.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd2.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd2.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": 
"auto", "MemoryAccounting": "yes", "MemoryAvailable": "3024691200", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-httpd2.yml.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount \"system-podman\\\\x2dkube.slice\" sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", 
"SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 10s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:98 Saturday 07 March 2026 11:46:08 -0500 (0:00:00.799) 0:02:53.550 ******** changed: [managed-node2] => { "changed": true, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket basic.target sysinit.target network-online.target -.mount \"system-podman\\\\x2dkube.slice\"", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "\"man:podman-kube-play(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "Environment": "\"PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-httpd2.yml.service\"", "ExecMainCode": 
"0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd2.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd2.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd2.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd2.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", 
"ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3019649024", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-httpd2.yml.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target \"system-podman\\\\x2dkube.slice\" -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": 
"3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 10s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:113 Saturday 07 March 2026 11:46:19 -0500 (0:00:11.297) 0:03:04.848 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Saturday 07 March 2026 11:46:19 -0500 (0:00:00.047) 0:03:04.895 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "started" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: httpd3\nspec:\n containers:\n - name: httpd3\n image: quay.io/libpod/testimage:20210610\n command:\n - /bin/busybox-extras\n - httpd\n - -f\n - -p\n - 80\n ports:\n - containerPort: 80\n hostPort: 15003\n volumeMounts:\n - mountPath: /var/www:Z\n name: www\n - mountPath: /var/httpd-create:Z\n name: create\n workingDir: /var/www\n volumes:\n - name: www\n hostPath:\n path: \"/tmp/lsr_od4netlk_podman/httpd3\"\n - name: create\n hostPath:\n path: \"/tmp/lsr_od4netlk_podman/httpd3-create\"" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Saturday 07 March 2026 11:46:19 -0500 (0:00:00.043) 0:03:04.938 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "httpd3" }, "spec": { "containers": [ { "command": [ "/bin/busybox-extras", "httpd", "-f", "-p", 80 ], "image": "quay.io/libpod/testimage:20210610", "name": "httpd3", "ports": [ { "containerPort": 80, "hostPort": 15003 } ], "volumeMounts": [ { "mountPath": "/var/www:Z", "name": "www" }, { "mountPath": "/var/httpd-create:Z", "name": "create" } ], "workingDir": "/var/www" } ], "volumes": [ { "hostPath": { "path": "/tmp/lsr_od4netlk_podman/httpd3" }, "name": "www" }, { "hostPath": { "path": "/tmp/lsr_od4netlk_podman/httpd3-create" }, "name": "create" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] 
*** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Saturday 07 March 2026 11:46:19 -0500 (0:00:00.074) 0:03:05.013 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_name": "httpd3", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:42 Saturday 07 March 2026 11:46:19 -0500 (0:00:00.044) 0:03:05.057 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:10 Saturday 07 March 2026 11:46:19 -0500 (0:00:00.043) 0:03:05.100 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_handle_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:17 Saturday 07 March 2026 11:46:19 -0500 (0:00:00.031) 0:03:05.132 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_handle_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:24 Saturday 07 March 2026 11:46:19 -0500 (0:00:00.031) 0:03:05.164 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 07 March 2026 11:46:19 -0500 (0:00:00.039) 0:03:05.204 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1772901838.6646128, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "9117e8a5afa3220d98f04938893af461a8e3008b", "ctime": 1772901831.1052737, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9335075, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1771804800.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1635770157", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.497) 0:03:05.701 ******** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.045) 0:03:05.747 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.044) 0:03:05.792 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_handle_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:73 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.045) 0:03:05.837 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:78 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.045) 0:03:05.882 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:83 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.037) 0:03:05.919 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:93 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.030) 0:03:05.949 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:100 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.032) 0:03:05.982 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:48 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.031) 0:03:06.013 ******** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_kube is none or __podman_kube | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:57 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.046) 0:03:06.060 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_systemd_scope": "system", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.045) 0:03:06.105 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:70 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.035) 0:03:06.140 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/httpd3.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:74 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.032) 0:03:06.173 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:81 Saturday 07 March 2026 11:46:20 -0500 (0:00:00.030) 0:03:06.203 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/etc/containers/ansible-kubernetes.d/httpd3.yml" ], "delta": "0:00:00.005922", "end": "2026-03-07 11:46:21.024745", "rc": 0, "start": "2026-03-07 11:46:21.018823" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:89 Saturday 07 March 2026 11:46:21 -0500 (0:00:00.406) 0:03:06.610 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:93 Saturday 07 March 2026 11:46:21 -0500 (0:00:00.039) 0:03:06.649 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Saturday 07 March 2026 11:46:21 -0500 
(0:00:00.087) 0:03:06.737 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:13 Saturday 07 March 2026 11:46:21 -0500 (0:00:00.066) 0:03:06.803 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 07 March 2026 11:46:21 -0500 (0:00:00.039) 0:03:06.842 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 07 March 2026 11:46:21 -0500 (0:00:00.040) 0:03:06.883 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Saturday 07 March 2026 11:46:21 -0500 (0:00:00.038) 0:03:06.921 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_volumes": [ "/tmp/lsr_od4netlk_podman/httpd3", "/tmp/lsr_od4netlk_podman/httpd3-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Saturday 07 March 2026 11:46:21 -0500 (0:00:00.072) 0:03:06.994 ******** ok: [managed-node2] => (item=/tmp/lsr_od4netlk_podman/httpd3) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/tmp/lsr_od4netlk_podman/httpd3", "mode": "0755", "owner": "root", "path": "/tmp/lsr_od4netlk_podman/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 23, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/lsr_od4netlk_podman/httpd3-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/lsr_od4netlk_podman/httpd3-create", "mode": "0755", "owner": "root", "path": "/tmp/lsr_od4netlk_podman/httpd3-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Saturday 07 March 2026 11:46:22 -0500 (0:00:00.784) 0:03:07.779 ******** included: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:2 Saturday 07 March 2026 11:46:22 -0500 
(0:00:00.096) 0:03:07.875 ******** ok: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle images when not booted] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:25 Saturday 07 March 2026 11:46:23 -0500 (0:00:00.947) 0:03:08.823 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_booted", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:32 Saturday 07 March 2026 11:46:23 -0500 (0:00:00.040) 0:03:08.863 ******** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:40 Saturday 07 March 2026 11:46:23 -0500 (0:00:00.401) 0:03:09.265 ******** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 24, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:51 Saturday 07 March 2026 11:46:24 -0500 (0:00:00.398) 0:03:09.663 ******** changed: [managed-node2] => { "changed": true, "checksum": "4ea4a304b347a6aaa397596e57cb6db94ea16b46", "dest": "/etc/containers/ansible-kubernetes.d/httpd3.yml", "gid": 0, "group": "root", "md5sum": "aeab814ae0b9924c37f21a0c0ffed22f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 762, "src": "/root/.ansible/tmp/ansible-tmp-1772901984.2287433-15536-81376403194330/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:63 Saturday 07 March 2026 11:46:24 -0500 (0:00:00.727) 0:03:10.390 ******** changed: [managed-node2] => { "actions": [ "/usr/bin/podman play kube --start=true /etc/containers/ansible-kubernetes.d/httpd3.yml" ], "changed": true } STDOUT: Pod: 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d Container: fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:72 Saturday 07 March 2026 11:46:25 -0500 (0:00:00.822) 0:03:11.213 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Enable service] *********************** task path: 
/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:85 Saturday 07 March 2026 11:46:26 -0500 (0:00:00.849) 0:03:12.063 ******** changed: [managed-node2] => { "changed": true, "enabled": true, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target -.mount network-online.target systemd-journald.socket basic.target \"system-podman\\\\x2dkube.slice\"", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "\"man:podman-kube-play(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "Environment": "\"PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-httpd3.yml.service\"", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd3.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd3.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd3.yml ; ignore_errors=no ; start_time=[n/a] ; 
stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd3.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3014914048", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-httpd3.yml.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target -.mount \"system-podman\\\\x2dkube.slice\"", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 10s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", 
"Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:98 Saturday 07 March 2026 11:46:27 -0500 (0:00:00.823) 0:03:12.886 ******** changed: [managed-node2] => { "changed": true, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "\"system-podman\\\\x2dkube.slice\" -.mount network-online.target systemd-journald.socket sysinit.target basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "\"man:podman-kube-play(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630936064", "EffectiveMemoryMax": "3630936064", "EffectiveTasksMax": "21802", "Environment": "\"PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-httpd3.yml.service\"", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd3.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd3.yml ; flags= ; 
start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd3.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd3.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3022934016", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": 
"yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-httpd3.yml.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target -.mount \"system-podman\\\\x2dkube.slice\"", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 10s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", 
"UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:113 Saturday 07 March 2026 11:46:38 -0500 (0:00:11.216) 0:03:24.103 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Saturday 07 March 2026 11:46:38 -0500 (0:00:00.029) 0:03:24.132 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Saturday 07 March 2026 11:46:38 -0500 (0:00:00.025) 0:03:24.158 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:205 Saturday 07 March 2026 11:46:38 -0500 (0:00:00.021) 0:03:24.180 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:214 Saturday 07 March 2026 11:46:38 -0500 (0:00:00.031) 0:03:24.211 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check if pods are running] *********************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:302 Saturday 07 March 2026 11:46:38 -0500 (0:00:00.047) 0:03:24.258 ******** failed: [managed-node2] (item=['httpd1', 'podman_basic_user', 3001]) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "podman", "pod", "inspect", "httpd1", "--format", "{{.State}}" ], "delta": "0:00:00.125389", "end": "2026-03-07 11:46:39.284510", "failed_when_result": true, "item": [ "httpd1", "podman_basic_user", 3001 ], "rc": 125, "start": "2026-03-07 11:46:39.159121" } STDERR: Error: no such pod httpd1 MSG: non-zero return code failed: [managed-node2] (item=['httpd2', 'root', 0]) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "podman", "pod", "inspect", "httpd2", "--format", "{{.State}}" ], "delta": "0:00:00.026013", "end": "2026-03-07 11:46:39.672378", "failed_when_result": true, "item": [ "httpd2", "root", 0 ], "rc": 125, "start": "2026-03-07 11:46:39.646365" } STDERR: Error: no such pod httpd2 MSG: non-zero return code failed: [managed-node2] (item=['httpd3', 'root', 0]) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "podman", "pod", "inspect", "httpd3", 
"--format", "{{.State}}" ], "delta": "0:00:08.668131", "end": "2026-03-07 11:46:48.695788", "failed_when_result": true, "item": [ "httpd3", "root", 0 ], "rc": 125, "start": "2026-03-07 11:46:40.027657" } STDERR: Error: no such pod httpd3 MSG: non-zero return code TASK [Dump journal] ************************************************************ task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:457 Saturday 07 March 2026 11:46:48 -0500 (0:00:10.018) 0:03:34.276 ******** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.035089", "end": "2026-03-07 11:46:49.106342", "failed_when_result": true, "rc": 0, "start": "2026-03-07 11:46:49.071253" } STDOUT: Mar 07 11:43:59 managed-node2 python3.12[13345]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:00 managed-node2 python3.12[13500]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:00 managed-node2 python3.12[13655]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:44:01 managed-node2 python3.12[13811]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:02 managed-node2 python3.12[13967]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Mar 07 11:44:03 managed-node2 python3.12[14124]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Mar 07 11:44:03 managed-node2 systemd[1]: Reload requested from client PID 14127 ('systemctl') (unit session-7.scope)... Mar 07 11:44:03 managed-node2 systemd[1]: Reloading... Mar 07 11:44:03 managed-node2 systemd-rc-local-generator[14179]: /etc/rc.d/rc.local is not marked executable, skipping. Mar 07 11:44:03 managed-node2 systemd[1]: Reloading finished in 205 ms. Mar 07 11:44:03 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1651. Mar 07 11:44:04 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. 
░░ ░░ The job identifier is 1651. Mar 07 11:44:05 managed-node2 python3.12[14387]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Mar 07 11:44:05 managed-node2 python3.12[14542]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:06 managed-node2 python3.12[14697]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:06 managed-node2 python3.12[14852]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:07 managed-node2 python3.12[15008]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:08 managed-node2 python3.12[15164]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:08 managed-node2 dbus-broker-launch[739]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. 
Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Mar 07 11:44:08 managed-node2 dbus-broker-launch[739]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Mar 07 11:44:08 managed-node2 systemd[1]: Started run-p15170-i15171.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p15170-i15171.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p15170-i15171.service has finished successfully. ░░ ░░ The job identifier is 1737. Mar 07 11:44:08 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1815. Mar 07 11:44:09 managed-node2 python3.12[15332]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:44:09 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Mar 07 11:44:09 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1815. Mar 07 11:44:09 managed-node2 systemd[1]: run-p15170-i15171.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p15170-i15171.service has successfully entered the 'dead' state. 
Mar 07 11:44:10 managed-node2 python3.12[15520]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:44:11 managed-node2 python3.12[15676]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:44:12 managed-node2 kernel: SELinux: Converting 500 SID table entries... Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability open_perms=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability always_check_network=0 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Mar 07 11:44:12 managed-node2 python3.12[15835]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:44:16 managed-node2 python3.12[15990]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:17 managed-node2 python3.12[16147]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:17 managed-node2 python3.12[16302]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:44:17 managed-node2 python3.12[16457]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:44:18 managed-node2 python3.12[16582]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901857.642029-10578-203368904599909/.source.yml _original_basename=.e_b4ms88 follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:44:18 managed-node2 python3.12[16737]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None 
log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:44:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat2437484652-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat2437484652-merged.mount has successfully entered the 'dead' state. Mar 07 11:44:18 managed-node2 kernel: evm: overlay not supported Mar 07 11:44:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck1662577311-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck1662577311-merged.mount has successfully entered the 'dead' state. Mar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.827309846 -0500 EST m=+0.070462949 system refresh Mar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.828665684 -0500 EST m=+0.071818888 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:44:18 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 1894. Mar 07 11:44:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice - cgroup machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice. ░░ Subject: A start job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished successfully. ░░ ░░ The job identifier is 1893. 
Mar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.87970198 -0500 EST m=+0.122855095 container create 246afbb22b17d10477ddd5d5c90f2d7d06c004c92b0b0defa7b3a3a43e4ecbe5 (image=, name=992c9586519a-infra, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09) Mar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.883851527 -0500 EST m=+0.127004609 pod create 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 (image=, name=nopull) Mar 07 11:44:19 managed-node2 podman[16744]: 2026-03-07 11:44:19.68863062 -0500 EST m=+0.931783734 container create d4fc0055deaed372cb505f1296fe8e33f059d4ac3adf6ad0c54243b547cbb4c4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z) Mar 07 11:44:19 managed-node2 podman[16744]: 2026-03-07 11:44:19.66621481 -0500 EST m=+0.909368011 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:44:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:44:22 managed-node2 python3.12[17081]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:44:22 managed-node2 python3.12[17242]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:24 managed-node2 python3.12[17399]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:25 managed-node2 python3.12[17555]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Mar 07 11:44:26 managed-node2 python3.12[17712]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Mar 07 11:44:27 managed-node2 python3.12[17869]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None 
description=None short=None Mar 07 11:44:28 managed-node2 python3.12[18024]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:29 managed-node2 python3.12[18180]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:30 managed-node2 python3.12[18336]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:31 managed-node2 python3.12[18492]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:44:32 managed-node2 python3.12[18676]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:44:33 managed-node2 python3.12[18831]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:44:36 managed-node2 python3.12[18986]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Mar 07 11:44:38 managed-node2 podman[19151]: 2026-03-07 11:44:38.376455047 -0500 EST m=+0.166868827 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest unable to copy from source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Mar 07 11:44:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:44:38 managed-node2 python3.12[19313]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:39 managed-node2 python3.12[19468]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:44:39 managed-node2 python3.12[19623]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:44:39 managed-node2 python3.12[19748]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901879.29425-11520-176634290947885/.source.yml _original_basename=.gt65xr01 follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:44:40 managed-node2 python3.12[19903]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.341496717 -0500 EST m=+0.013739324 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:44:40 managed-node2 systemd[1]: Created slice machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice - cgroup machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice. 
░░ Subject: A start job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished successfully. ░░ ░░ The job identifier is 1899. Mar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.376773579 -0500 EST m=+0.049016203 container create 1e62cdef56136721315b848a501f951852d27e5af5ee669a7ef1724aa57fbf3a (image=, name=a7c38d962220-infra, pod_id=a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0) Mar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.381063787 -0500 EST m=+0.053306319 pod create a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 (image=, name=bogus) Mar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.531228812 -0500 EST m=+0.203471494 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest unable to copy from source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Mar 07 11:44:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:44:42 managed-node2 python3.12[20227]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:44:43 managed-node2 python3.12[20389]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:45 managed-node2 python3.12[20546]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:46 managed-node2 python3.12[20702]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Mar 07 11:44:47 managed-node2 python3.12[20859]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Mar 07 11:44:48 managed-node2 python3.12[21016]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 
ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Mar 07 11:44:49 managed-node2 python3.12[21171]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:50 managed-node2 python3.12[21327]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:51 managed-node2 python3.12[21483]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:52 managed-node2 python3.12[21639]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:44:53 managed-node2 python3.12[21823]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:44:54 managed-node2 python3.12[21978]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:44:57 managed-node2 python3.12[22133]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:58 managed-node2 python3.12[22290]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:44:58 managed-node2 python3.12[22446]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Mar 07 11:44:59 
managed-node2 python3.12[22603]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:59 managed-node2 python3.12[22760]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:44:59 managed-node2 python3.12[22760]: ansible-containers.podman.podman_play version: 5.8.0, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Mar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.831024987 -0500 EST m=+0.021885022 pod stop 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 (image=, name=nopull) Mar 07 11:44:59 managed-node2 systemd[1]: Removed slice machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice - cgroup machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice. ░░ Subject: A stop job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished. ░░ ░░ The job identifier is 1905 and the job result is done. Mar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.862282915 -0500 EST m=+0.053142851 container remove d4fc0055deaed372cb505f1296fe8e33f059d4ac3adf6ad0c54243b547cbb4c4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.882439363 -0500 EST m=+0.073299300 container remove 246afbb22b17d10477ddd5d5c90f2d7d06c004c92b0b0defa7b3a3a43e4ecbe5 (image=, name=992c9586519a-infra, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09) Mar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.88990995 -0500 EST m=+0.080769856 pod remove 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 (image=, name=nopull) Mar 07 11:44:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
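A note on the podman-kube@ unit name used in the stop/disable task above: it is derived from the kube file path by the systemd-escape call logged at 11:44:58. A minimal shell reproduction, assuming root on the managed node:

    systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml
    # prints: podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service
    # each '/' in the path becomes '-', and the literal '-' in 'ansible-kubernetes.d' is escaped as \x2d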
Mar 07 11:45:00 managed-node2 python3.12[22931]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:00 managed-node2 python3.12[23086]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:00 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:45:03 managed-node2 python3.12[23403]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:03 managed-node2 python3.12[23564]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:05 managed-node2 python3.12[23721]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:06 managed-node2 python3.12[23877]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Mar 07 11:45:07 managed-node2 python3.12[24034]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Mar 07 11:45:08 managed-node2 python3.12[24191]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Mar 07 11:45:09 managed-node2 python3.12[24346]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ 
install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:10 managed-node2 python3.12[24502]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:11 managed-node2 python3.12[24658]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:11 managed-node2 python3.12[24814]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:45:13 managed-node2 python3.12[24998]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:45:13 managed-node2 python3.12[25153]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:45:17 managed-node2 python3.12[25308]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:18 managed-node2 python3.12[25465]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:19 managed-node2 python3.12[25622]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Mar 07 11:45:19 managed-node2 python3.12[25779]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:20 managed-node2 python3.12[25936]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None 
quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:45:20 managed-node2 python3.12[25936]: ansible-containers.podman.podman_play version: 5.8.0, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Mar 07 11:45:20 managed-node2 podman[25943]: 2026-03-07 11:45:20.102000803 -0500 EST m=+0.020362137 pod stop a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 (image=, name=bogus) Mar 07 11:45:20 managed-node2 systemd[1]: Removed slice machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice - cgroup machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice. ░░ Subject: A stop job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished. ░░ ░░ The job identifier is 1907 and the job result is done. Mar 07 11:45:20 managed-node2 podman[25943]: 2026-03-07 11:45:20.136901485 -0500 EST m=+0.055262816 container remove 1e62cdef56136721315b848a501f951852d27e5af5ee669a7ef1724aa57fbf3a (image=, name=a7c38d962220-infra, pod_id=a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0) Mar 07 11:45:20 managed-node2 podman[25943]: 2026-03-07 11:45:20.143954123 -0500 EST m=+0.062315426 pod remove a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 (image=, name=bogus) Mar 07 11:45:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:45:20 managed-node2 python3.12[26107]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:20 managed-node2 python3.12[26262]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Mar 07 11:45:23 managed-node2 python3.12[26580]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:24 managed-node2 python3.12[26741]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:27 managed-node2 python3.12[26898]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:28 managed-node2 python3.12[27054]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Mar 07 11:45:28 managed-node2 python3.12[27211]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Mar 07 11:45:29 managed-node2 python3.12[27368]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Mar 07 11:45:30 managed-node2 python3.12[27523]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:31 managed-node2 python3.12[27679]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:32 managed-node2 python3.12[27835]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present 
allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:33 managed-node2 python3.12[27991]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:45:34 managed-node2 python3.12[28175]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:45:35 managed-node2 python3.12[28330]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:45:39 managed-node2 python3.12[28486]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Mar 07 11:45:39 managed-node2 python3.12[28642]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:39 managed-node2 python3.12[28799]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:40 managed-node2 python3.12[28955]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:41 managed-node2 python3.12[29111]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:41 managed-node2 python3.12[29267]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Mar 07 11:45:41 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 1987. Mar 07 11:45:41 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 1909. Mar 07 11:45:41 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. 
░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 1909. Mar 07 11:45:41 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 1989. Mar 07 11:45:41 managed-node2 systemd-logind[759]: New session 9 of user podman_basic_user. ░░ Subject: A new session 9 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 9 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 29271. Mar 07 11:45:41 managed-node2 (systemd)[29271]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Mar 07 11:45:41 managed-node2 systemd[29271]: Queued start job for default target default.target. Mar 07 11:45:41 managed-node2 systemd[29271]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Mar 07 11:45:41 managed-node2 systemd[29271]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Mar 07 11:45:41 managed-node2 systemd[29271]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Mar 07 11:45:41 managed-node2 systemd[29271]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 6. Mar 07 11:45:41 managed-node2 systemd[29271]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. 
░░ ░░ The job identifier is 12. Mar 07 11:45:41 managed-node2 systemd[29271]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Mar 07 11:45:41 managed-node2 systemd[29271]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Mar 07 11:45:41 managed-node2 systemd[29271]: Startup finished in 67ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 67799 microseconds. Mar 07 11:45:41 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 1989. 
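The getsubids and linger steps logged above are the rootless-podman prerequisites the role verifies before acting as podman_basic_user. The same checks and setup can be reproduced by hand; a sketch, assuming root on the managed node:

    getsubids podman_basic_user                # confirm a subordinate UID range exists for the user
    getsubids -g podman_basic_user             # confirm the matching subordinate GID range
    loginctl enable-linger podman_basic_user   # keep the per-user manager (user@3001.service) running without an active login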
Mar 07 11:45:42 managed-node2 python3.12[29442]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:42 managed-node2 python3.12[29597]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:43 managed-node2 sudo[29802]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhibmgzsvzhrmfekymwxsnjugtcfcwcf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901942.8189244-14095-168859344459604/AnsiballZ_podman_image.py' Mar 07 11:45:43 managed-node2 sudo[29802]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:43 managed-node2 kernel: catatonit[29817]: segfault at a9b80 ip 00007fdf17f46dbb sp 00007fffc0c93fa0 error 4 in catatonit[4dbb,7fdf17f43000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:45:43 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:43 managed-node2 systemd-coredump[29824]: Process 29817 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:43 managed-node2 systemd[1]: Created slice system-systemd\x2dcoredump.slice - Slice /system/systemd-coredump. ░░ Subject: A start job for unit system-systemd\x2dcoredump.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-systemd\x2dcoredump.slice has finished successfully. ░░ ░░ The job identifier is 2075. Mar 07 11:45:43 managed-node2 systemd[1]: Started systemd-coredump@0-29824-0.service - Process Core Dump (PID 29824/UID 0). ░░ Subject: A start job for unit systemd-coredump@0-29824-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@0-29824-0.service has finished successfully. ░░ ░░ The job identifier is 2070. Mar 07 11:45:43 managed-node2 systemd[29271]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Mar 07 11:45:43 managed-node2 systemd[29271]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Mar 07 11:45:43 managed-node2 systemd[29271]: Started dbus-broker.service - D-Bus User Message Bus. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Mar 07 11:45:43 managed-node2 dbus-broker-launch[29829]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Mar 07 11:45:43 managed-node2 dbus-broker-launch[29829]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Mar 07 11:45:43 managed-node2 systemd-coredump[29826]: Resource limits disable core dumping for process 29817 (catatonit). Mar 07 11:45:43 managed-node2 systemd-coredump[29826]: Process 29817 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29817 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29817 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:43 managed-node2 systemd[1]: systemd-coredump@0-29824-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@0-29824-0.service has successfully entered the 'dead' state. Mar 07 11:45:43 managed-node2 dbus-broker-launch[29829]: Ready Mar 07 11:45:43 managed-node2 systemd[29271]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Mar 07 11:45:43 managed-node2 systemd[29271]: Started podman-29812.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Mar 07 11:45:43 managed-node2 kernel: catatonit[29846]: segfault at a9b80 ip 00007ff7160afdbb sp 00007fff8cfc2e40 error 4 in catatonit[4dbb,7ff7160ac000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:45:43 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:43 managed-node2 systemd-coredump[29853]: Process 29846 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:43 managed-node2 systemd[1]: Started systemd-coredump@1-29853-0.service - Process Core Dump (PID 29853/UID 0). ░░ Subject: A start job for unit systemd-coredump@1-29853-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@1-29853-0.service has finished successfully. ░░ ░░ The job identifier is 2079. Mar 07 11:45:43 managed-node2 systemd[29271]: Started podman-29841.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Mar 07 11:45:43 managed-node2 systemd-coredump[29856]: Resource limits disable core dumping for process 29846 (catatonit). 
Mar 07 11:45:43 managed-node2 systemd-coredump[29856]: Process 29846 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29846 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29846 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:43 managed-node2 systemd[1]: systemd-coredump@1-29853-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@1-29853-0.service has successfully entered the 'dead' state. Mar 07 11:45:44 managed-node2 kernel: catatonit[29891]: segfault at a9b80 ip 00007fc382ae6dbb sp 00007fff833112d0 error 4 in catatonit[4dbb,7fc382ae3000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:44 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:44 managed-node2 systemd-coredump[29898]: Process 29891 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:44 managed-node2 systemd[1]: Started systemd-coredump@2-29898-0.service - Process Core Dump (PID 29898/UID 0). ░░ Subject: A start job for unit systemd-coredump@2-29898-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@2-29898-0.service has finished successfully. ░░ ░░ The job identifier is 2088. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29886.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-pause-320fc37c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. Mar 07 11:45:44 managed-node2 systemd-coredump[29900]: Resource limits disable core dumping for process 29891 (catatonit). Mar 07 11:45:44 managed-node2 systemd-coredump[29900]: Process 29891 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29891 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29891 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:44 managed-node2 systemd[1]: systemd-coredump@2-29898-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@2-29898-0.service has successfully entered the 'dead' state. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29905.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. 
░░ ░░ The job identifier is 35. Mar 07 11:45:44 managed-node2 kernel: catatonit[29927]: segfault at a9b80 ip 00007f18b4071dbb sp 00007fffc0fc07f0 error 4 in catatonit[4dbb,7f18b406e000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:44 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:44 managed-node2 systemd-coredump[29935]: Process 29927 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:44 managed-node2 systemd[1]: Started systemd-coredump@3-29935-0.service - Process Core Dump (PID 29935/UID 0). ░░ Subject: A start job for unit systemd-coredump@3-29935-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@3-29935-0.service has finished successfully. ░░ ░░ The job identifier is 2097. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29922.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Mar 07 11:45:44 managed-node2 systemd-coredump[29936]: Resource limits disable core dumping for process 29927 (catatonit). Mar 07 11:45:44 managed-node2 systemd-coredump[29936]: Process 29927 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29927 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29927 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:44 managed-node2 systemd[29271]: podman-pause-bf3bda8f.scope: Couldn't move process 29927 to requested cgroup '/user.slice/user-3001.slice/user@3001.service/user.slice/podman-pause-bf3bda8f.scope' (directly or via the system bus): No such process Mar 07 11:45:44 managed-node2 systemd[29271]: podman-pause-bf3bda8f.scope: Failed to add PIDs to scope's control group: Permission denied Mar 07 11:45:44 managed-node2 systemd[29271]: podman-pause-bf3bda8f.scope: Failed with result 'resources'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT has entered the 'failed' state with result 'resources'. Mar 07 11:45:44 managed-node2 systemd[1]: systemd-coredump@3-29935-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@3-29935-0.service has successfully entered the 'dead' state. Mar 07 11:45:44 managed-node2 systemd[29271]: Failed to start podman-pause-bf3bda8f.scope. ░░ Subject: A start job for unit UNIT has failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished with a failure. ░░ ░░ The job identifier is 43 and the job result is failed. 
Mar 07 11:45:44 managed-node2 kernel: catatonit[29953]: segfault at a9b80 ip 00007faeb6efadbb sp 00007ffc7421de50 error 4 in catatonit[4dbb,7faeb6ef7000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:44 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:44 managed-node2 systemd-coredump[29961]: Process 29953 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:44 managed-node2 systemd[1]: Started systemd-coredump@4-29961-0.service - Process Core Dump (PID 29961/UID 0). ░░ Subject: A start job for unit systemd-coredump@4-29961-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@4-29961-0.service has finished successfully. ░░ ░░ The job identifier is 2106. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29948.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. Mar 07 11:45:44 managed-node2 systemd-coredump[29963]: Resource limits disable core dumping for process 29953 (catatonit). Mar 07 11:45:44 managed-node2 systemd-coredump[29963]: Process 29953 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29953 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29953 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-pause-ed80bfc9.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Mar 07 11:45:44 managed-node2 systemd[1]: systemd-coredump@4-29961-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@4-29961-0.service has successfully entered the 'dead' state. 
Mar 07 11:45:44 managed-node2 sudo[29802]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:44 managed-node2 python3.12[30123]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:45 managed-node2 python3.12[30278]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:45 managed-node2 python3.12[30433]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:45:46 managed-node2 python3.12[30558]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901945.4660614-14184-34473031875573/.source.yml _original_basename=.8iqqb013 follow=False checksum=5a374c59230176d446e6cd38bcc64da326c45092 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:46 managed-node2 sudo[30763]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oerbzxzssklczavswqvqximbbvrtsmtu ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901946.1921344-14217-62861233194905/AnsiballZ_podman_play.py' Mar 07 11:45:46 managed-node2 sudo[30763]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:46 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:45:46 managed-node2 kernel: catatonit[30786]: segfault at a9b80 ip 00007f77afc58dbb sp 00007fff81ee37a0 error 4 in catatonit[4dbb,7f77afc55000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:46 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:46 managed-node2 systemd-coredump[30794]: Process 30786 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:46 managed-node2 systemd[1]: Started systemd-coredump@5-30794-0.service - Process Core Dump (PID 30794/UID 0). 
░░ Subject: A start job for unit systemd-coredump@5-30794-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@5-30794-0.service has finished successfully. ░░ ░░ The job identifier is 2115. Mar 07 11:45:46 managed-node2 systemd[29271]: Started podman-30780.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 55. Mar 07 11:45:46 managed-node2 systemd[29271]: Created slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice - cgroup user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 59. Mar 07 11:45:46 managed-node2 systemd-coredump[30796]: Resource limits disable core dumping for process 30786 (catatonit). Mar 07 11:45:46 managed-node2 systemd-coredump[30796]: Process 30786 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 30786 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 30786 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:46 managed-node2 systemd[1]: systemd-coredump@5-30794-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@5-30794-0.service has successfully entered the 'dead' state. Mar 07 11:45:46 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Mar 07 11:45:46 managed-node2 systemd[29271]: Started rootless-netns-8fa057a2.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 63. Mar 07 11:45:46 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Mar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:46 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:45:46 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:45:46 managed-node2 systemd[29271]: Started run-p30834-i30835.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 67. 
Mar 07 11:45:46 managed-node2 aardvark-dns[30834]: starting aardvark on a child with pid 30835 Mar 07 11:45:46 managed-node2 aardvark-dns[30835]: Successfully parsed config Mar 07 11:45:46 managed-node2 aardvark-dns[30835]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Mar 07 11:45:46 managed-node2 aardvark-dns[30835]: Listen v6 ip {} Mar 07 11:45:46 managed-node2 aardvark-dns[30835]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Mar 07 11:45:46 managed-node2 conmon[30852]: conmon a532637f985bd7708dd5 : failed to write to /proc/self/oom_score_adj: Permission denied Mar 07 11:45:46 managed-node2 systemd[29271]: Started libpod-conmon-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. Mar 07 11:45:46 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/15/attach} Mar 07 11:45:46 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : terminal_ctrl_fd: 15 Mar 07 11:45:46 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : winsz read side: 18, winsz write side: 19 Mar 07 11:45:47 managed-node2 systemd[29271]: Started libpod-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 76. Mar 07 11:45:47 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : container PID: 30855 Mar 07 11:45:47 managed-node2 kernel: catatonit[30855]: segfault at a9b80 ip 00007f9031569dbb sp 00007ffe6192e700 error 4 in catatonit[4dbb,7f9031566000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:47 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:47 managed-node2 systemd-coredump[30857]: Process 30855 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:47 managed-node2 systemd[1]: Started systemd-coredump@6-30857-0.service - Process Core Dump (PID 30857/UID 0). ░░ Subject: A start job for unit systemd-coredump@6-30857-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@6-30857-0.service has finished successfully. ░░ ░░ The job identifier is 2124. Mar 07 11:45:47 managed-node2 conmon[30859]: conmon ee4a1b77972d6a790be3 : failed to write to /proc/self/oom_score_adj: Permission denied Mar 07 11:45:47 managed-node2 systemd[29271]: Started libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 81. 
Mar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Mar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : terminal_ctrl_fd: 14 Mar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : winsz read side: 17, winsz write side: 18 Mar 07 11:45:47 managed-node2 systemd[29271]: Started libpod-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Mar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : container PID: 30863 Mar 07 11:45:47 managed-node2 systemd-coredump[30858]: Resource limits disable core dumping for process 30855 (catatonit). Mar 07 11:45:47 managed-node2 systemd-coredump[30858]: Process 30855 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 30855 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 30855 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:47 managed-node2 systemd[1]: systemd-coredump@6-30857-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@6-30857-0.service has successfully entered the 'dead' state. Mar 07 11:45:47 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : container 30855 exited with status 139 Mar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Mar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399 Container: ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 Mar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2026-03-07T11:45:46-05:00" level=info msg="/bin/podman filtering at log level debug" time="2026-03-07T11:45:46-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2026-03-07T11:45:46-05:00" level=info msg="Setting parallel job count to 7" time="2026-03-07T11:45:46-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2026-03-07T11:45:46-05:00" level=info msg="Using sqlite as database backend" time="2026-03-07T11:45:46-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph driver overlay" time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2026-03-07T11:45:46-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2026-03-07T11:45:46-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2026-03-07T11:45:46-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2026-03-07T11:45:46-05:00" level=debug msg="Using transient store: false" time="2026-03-07T11:45:46-05:00" level=debug msg="Not configuring container store" time="2026-03-07T11:45:46-05:00" level=debug msg="Initializing event backend file" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2026-03-07T11:45:46-05:00" level=info msg="Creating a new rootless user namespace" time="2026-03-07T11:45:46-05:00" level=info msg="/bin/podman filtering at log level debug" time="2026-03-07T11:45:46-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2026-03-07T11:45:46-05:00" level=info msg="Setting parallel job count to 7" time="2026-03-07T11:45:46-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2026-03-07T11:45:46-05:00" level=info msg="Using sqlite as database backend" time="2026-03-07T11:45:46-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph driver overlay" time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2026-03-07T11:45:46-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2026-03-07T11:45:46-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2026-03-07T11:45:46-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2026-03-07T11:45:46-05:00" level=debug msg="Using transient store: false" time="2026-03-07T11:45:46-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2026-03-07T11:45:46-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2026-03-07T11:45:46-05:00" level=debug msg="Initializing event backend file" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Successfully loaded 1 networks" time="2026-03-07T11:45:46-05:00" level=debug msg="found free device name podman1" time="2026-03-07T11:45:46-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2026-03-07T11:45:46-05:00" level=debug msg="Pod using bridge network mode" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup path 
user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string" time="2026-03-07T11:45:46-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:45:46-05:00" level=debug msg="setting container name 46ab0de2b796-infra" time="2026-03-07T11:45:46-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 196e978a743fccc03fb8ddd2d41a1f9a15d160f55231f9844a9070e6a9ce61ba bridge podman1 2026-03-07 11:45:46.711313386 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2026-03-07T11:45:46-05:00" level=debug msg="Successfully loaded 2 networks" time="2026-03-07T11:45:46-05:00" level=debug msg="Allocated lock 1 for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2026-03-07T11:45:46-05:00" level=debug msg="Check for idmapped mounts support " time="2026-03-07T11:45:46-05:00" level=debug msg="Created container \"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\" has run directory \"/run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:45:46-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2026-03-07T11:45:46-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2026-03-07T11:45:46-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:45:46-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2026-03-07T11:45:46-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:45:46-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2026-03-07T11:45:46-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:45:46-05:00" level=debug msg="adding container to pod httpd1" time="2026-03-07T11:45:46-05:00" level=debug msg="setting container name httpd1-httpd1" time="2026-03-07T11:45:46-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:45:46-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /proc" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /dev" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /dev/pts" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /dev/mqueue" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /sys" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2026-03-07T11:45:46-05:00" level=debug msg="Allocated lock 2 for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Created container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\" has run directory 
\"/run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Strongconnecting node a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="Pushed a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 onto stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Finishing node a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431. Popped a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 off stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Strongconnecting node ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:46-05:00" level=debug msg="Pushed ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 onto stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Finishing node ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431. Popped ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 off stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-dcd66955-fe96-f197-416b-aad9b87d86cb for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="Created root filesystem for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/rootfs/merge" time="2026-03-07T11:45:46-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2026-03-07T11:45:46-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2026-03-07T11:45:46-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::bridge] Using mtu 65520 from default route interface for the network [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/ip_forward to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/route_localnet to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink_route] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_196e978a_10_89_0_0_nm24 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "d6:83:e5:9a:8f:77", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Starting parent driver\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport112898145/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport112898145/.bp.sock]\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=Ready\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport is ready" time="2026-03-07T11:45:46-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:45:46-05:00" level=debug msg="Setting Cgroups for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 to 
user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice:libpod:a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:45:46-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/rootfs/merge\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Created OCI spec for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/config.json" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:45:46-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 -u a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata -p /run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/pidfile -n 46ab0de2b796-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg 
--storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431]" time="2026-03-07T11:45:46-05:00" level=info msg="Running conmon under slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice and unitName libpod-conmon-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2026-03-07T11:45:47-05:00" level=debug msg="Received: 30855" time="2026-03-07T11:45:47-05:00" level=info msg="Got Conmon PID as 30853" time="2026-03-07T11:45:47-05:00" level=debug msg="Created container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 in OCI runtime" time="2026-03-07T11:45:47-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2026-03-07T11:45:47-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2026-03-07T11:45:47-05:00" level=debug msg="Starting container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 with command [/catatonit -P]" time="2026-03-07T11:45:47-05:00" level=debug msg="Started container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:47-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/JH6ZGMTVFTMNRQA4DOPAJEHHJA,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c60,c376\"" time="2026-03-07T11:45:47-05:00" level=debug msg="Mounted container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/merged\"" time="2026-03-07T11:45:47-05:00" level=debug msg="Created root filesystem for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 at /home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/merged" time="2026-03-07T11:45:47-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:45:47-05:00" level=debug msg="Setting Cgroups for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 to user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice:libpod:ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:47-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:45:47-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2026-03-07T11:45:47-05:00" level=debug msg="Created OCI spec for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/config.json" 
time="2026-03-07T11:45:47-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399" time="2026-03-07T11:45:47-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:47-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:47-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:45:47-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 -u ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata -p /run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431]" time="2026-03-07T11:45:47-05:00" level=info msg="Running conmon under slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice and unitName libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2026-03-07T11:45:47-05:00" level=debug msg="Received: 30863" time="2026-03-07T11:45:47-05:00" level=info msg="Got Conmon PID as 30861" time="2026-03-07T11:45:47-05:00" level=debug 
msg="Created container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 in OCI runtime" time="2026-03-07T11:45:47-05:00" level=debug msg="Starting container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 with command [/bin/busybox-extras httpd -f -p 80]" time="2026-03-07T11:45:47-05:00" level=debug msg="Started container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:47-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2026-03-07T11:45:47-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=30780 time="2026-03-07T11:45:47-05:00" level=debug msg="Shutting down engines" Mar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431)" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="Setting parallel job count to 7" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="Using sqlite as database backend" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using graph driver overlay" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using run root /run/user/3001/containers" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using transient store: false" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cached value indicated that native-diff is usable" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Initializing event backend file" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid 
argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Mar 07 11:45:47 managed-node2 sudo[30763]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cleaning up container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Tearing down network namespace at /run/user/3001/netns/netns-dcd66955-fe96-f197-416b-aad9b87d86cb for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 196e978a743fccc03fb8ddd2d41a1f9a15d160f55231f9844a9070e6a9ce61ba bridge podman1 2026-03-07 11:45:46.711313386 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Successfully loaded 2 networks" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Tearing down..\n" Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: Received SIGHUP Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: Successfully parsed config Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: Listen v4 ip {} Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: Listen v6 ip {} Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: No configuration found stopping the sever Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [INFO netavark::firewall] Using nftables firewall driver\n" Mar 07 11:45:47 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:47 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:45:47 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:45:47 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [INFO netavark::network::bridge] removing bridge podman1\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"INPUT\", 
expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\"tcp\")), Element(String(\"udp\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"th\", field: \"dport\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"daddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \"state\", family: None, dir: None })), right: List([String(\"established\"), String(\"related\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"POSTROUTING\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \"nv_196e978a_10_89_0_0_nm24\" })], handle: Some(26), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 4 rules\n[DEBUG netavark::firewall::nft] Found chain nv_196e978a_10_89_0_0_nm24\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"NETAVARK-ISOLATION-3\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\"podman1\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain nv_196e978a_10_89_0_0_nm24_dnat\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain nv_196e978a_10_89_0_0_nm24_dnat\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Teardown complete\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cleaning up rootless network namespace" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Successfully cleaned up container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers 
--log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431)" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Shutting down engines" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=30868 Mar 07 11:45:47 managed-node2 sudo[31091]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dogisxyjglvsrovdvtkjavoxxllbgpoh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901947.3123984-14267-185518984212991/AnsiballZ_systemd.py' Mar 07 11:45:47 managed-node2 sudo[31091]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:47 managed-node2 python3.12[31094]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Mar 07 11:45:47 managed-node2 systemd[29271]: Reload requested from client PID 31095 ('systemctl')... Mar 07 11:45:47 managed-node2 systemd[29271]: Reloading... Mar 07 11:45:47 managed-node2 systemd[29271]: Reloading finished in 42 ms. Mar 07 11:45:47 managed-node2 sudo[31091]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:48 managed-node2 sudo[31309]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rictksfalktnaehelhfnnbscqeskopdj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901947.9564757-14297-244281299619627/AnsiballZ_systemd.py' Mar 07 11:45:48 managed-node2 sudo[31309]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:48 managed-node2 python3.12[31312]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Mar 07 11:45:48 managed-node2 systemd[29271]: Reload requested from client PID 31315 ('systemctl')... Mar 07 11:45:48 managed-node2 systemd[29271]: Reloading... Mar 07 11:45:48 managed-node2 systemd[29271]: Reloading finished in 39 ms. 
Mar 07 11:45:48 managed-node2 sudo[31309]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:48 managed-node2 sudo[31529]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nuvmtvywssfvscqhqidkfbcugrldioki ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901948.6670218-14329-160015858227475/AnsiballZ_systemd.py' Mar 07 11:45:48 managed-node2 sudo[31529]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:49 managed-node2 python3.12[31532]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Mar 07 11:45:49 managed-node2 systemd[29271]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 103. Mar 07 11:45:49 managed-node2 systemd[29271]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 91. Mar 07 11:45:49 managed-node2 kernel: catatonit[31544]: segfault at a9b80 ip 00007f1befc8ddbb sp 00007ffc46cfc240 error 4 in catatonit[4dbb,7f1befc8a000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:45:49 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:49 managed-node2 systemd-coredump[31550]: Process 31544 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:49 managed-node2 systemd[1]: Started systemd-coredump@7-31550-0.service - Process Core Dump (PID 31550/UID 0). ░░ Subject: A start job for unit systemd-coredump@7-31550-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@7-31550-0.service has finished successfully. ░░ ░░ The job identifier is 2135. Mar 07 11:45:49 managed-node2 systemd-coredump[31553]: Process 31544 (catatonit) of user 3001 dumped core. Module /usr/libexec/catatonit/catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 31544: #0 0x00007f1befc8ddbb __libc_setup_tls (/usr/libexec/catatonit/catatonit + 0x4dbb) #1 0x00007f1befc8da79 __libc_start_main_impl (/usr/libexec/catatonit/catatonit + 0x4a79) #2 0x00007f1befc8b4e5 _start (/usr/libexec/catatonit/catatonit + 0x24e5) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 31544 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 31544 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:49 managed-node2 systemd[1]: systemd-coredump@7-31550-0.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@7-31550-0.service has successfully entered the 'dead' state. Mar 07 11:45:59 managed-node2 podman[31542]: time="2026-03-07T11:45:59-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Mar 07 11:45:59 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : container 30863 exited with status 137 Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431)" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=info msg="Setting parallel job count to 7" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=info msg="Using sqlite as database backend" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using graph driver overlay" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using run root /run/user/3001/containers" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using transient store: false" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Cached value indicated that native-diff is usable" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Initializing event backend file" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Mar 07 
11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431)" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Shutting down engines" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=31562 Mar 07 11:45:59 managed-node2 systemd[29271]: Stopping libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 105. Mar 07 11:45:59 managed-node2 systemd[29271]: Stopped libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 105 and the job result is done. Mar 07 11:45:59 managed-node2 systemd[29271]: Removed slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice - cgroup user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 104 and the job result is done. 
Mar 07 11:45:59 managed-node2 podman[31542]: Pods stopped: Mar 07 11:45:59 managed-node2 podman[31542]: 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399 Mar 07 11:45:59 managed-node2 podman[31542]: Pods removed: Mar 07 11:45:59 managed-node2 podman[31542]: 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399 Mar 07 11:45:59 managed-node2 podman[31542]: Secrets removed: Mar 07 11:45:59 managed-node2 podman[31542]: Volumes removed: Mar 07 11:45:59 managed-node2 systemd[29271]: Created slice user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice - cgroup user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Mar 07 11:45:59 managed-node2 systemd[29271]: Started libpod-879669150b8fd150356b3d47d7f340be20e76730c97501db0eb82939c5fc9bd1.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 111. Mar 07 11:45:59 managed-node2 kernel: catatonit[31575]: segfault at a9b80 ip 00007f139f341dbb sp 00007fff16d48850 error 4 in catatonit[4dbb,7f139f33e000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:59 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:59 managed-node2 systemd-coredump[31577]: Process 31575 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:59 managed-node2 systemd[1]: Started systemd-coredump@8-31577-0.service - Process Core Dump (PID 31577/UID 0). ░░ Subject: A start job for unit systemd-coredump@8-31577-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@8-31577-0.service has finished successfully. ░░ ░░ The job identifier is 2144. Mar 07 11:45:59 managed-node2 systemd[29271]: Started rootless-netns-461281de.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 115. Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:59 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:45:59 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:45:59 managed-node2 systemd-coredump[31579]: Process 31575 (catatonit) of user 3001 dumped core. 
Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f139f341dbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 31575 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 31575 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:59 managed-node2 systemd[1]: systemd-coredump@8-31577-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@8-31577-0.service has successfully entered the 'dead' state. Mar 07 11:45:59 managed-node2 conmon[31573]: conmon 879669150b8fd150356b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-3001.slice/user@3001.service/user.slice/libpod-879669150b8fd150356b3d47d7f340be20e76730c97501db0eb82939c5fc9bd1.scope/container/memory.events Mar 07 11:45:59 managed-node2 systemd[29271]: Started run-p31598-i31599.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 119. Mar 07 11:45:59 managed-node2 systemd[29271]: Started libpod-fa374669b33b370f44864c8cec6d40cc032a256cc26f6dca99aab760c7fd4b53.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 123. Mar 07 11:45:59 managed-node2 kernel: catatonit[31627]: segfault at a9b80 ip 00007fb82f5dbdbb sp 00007fff96168e50 error 4 in catatonit[4dbb,7fb82f5d8000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:59 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:59 managed-node2 systemd-coredump[31632]: Process 31627 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:59 managed-node2 systemd[1]: Started systemd-coredump@9-31632-0.service - Process Core Dump (PID 31632/UID 0). ░░ Subject: A start job for unit systemd-coredump@9-31632-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@9-31632-0.service has finished successfully. ░░ ░░ The job identifier is 2153. Mar 07 11:45:59 managed-node2 systemd[29271]: Started libpod-309bf1ae864fe28aa1049bfc71c43fecc7211dc1b56ee779eeec10eb7def34c6.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 128. 
Mar 07 11:45:59 managed-node2 podman[31542]: Pod: Mar 07 11:45:59 managed-node2 systemd[29271]: podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Mar 07 11:45:59 managed-node2 systemd[29271]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 91. Mar 07 11:45:59 managed-node2 podman[31542]: 02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a Mar 07 11:45:59 managed-node2 podman[31542]: Container: Mar 07 11:45:59 managed-node2 podman[31542]: 309bf1ae864fe28aa1049bfc71c43fecc7211dc1b56ee779eeec10eb7def34c6 Mar 07 11:45:59 managed-node2 systemd-coredump[31633]: Process 31627 (catatonit) of user 3001 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007fb82f5dbdbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 31627 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 31627 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:59 managed-node2 systemd[1]: systemd-coredump@9-31632-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@9-31632-0.service has successfully entered the 'dead' state. Mar 07 11:45:59 managed-node2 sudo[31529]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:59 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:45:59 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:59 managed-node2 kernel: catatonit[31700]: segfault at a9b80 ip 00007fea7c676dbb sp 00007ffef7d06ba0 error 4 in catatonit[4dbb,7fea7c673000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:45:59 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:59 managed-node2 systemd-coredump[31705]: Process 31700 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:59 managed-node2 systemd[1]: Started systemd-coredump@10-31705-0.service - Process Core Dump (PID 31705/UID 0). ░░ Subject: A start job for unit systemd-coredump@10-31705-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@10-31705-0.service has finished successfully. ░░ ░░ The job identifier is 2162. Mar 07 11:46:00 managed-node2 systemd-coredump[31708]: Process 31700 (catatonit) of user 3001 dumped core. 
Module /usr/libexec/catatonit/catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 31700: #0 0x00007fea7c676dbb __libc_setup_tls (/usr/libexec/catatonit/catatonit + 0x4dbb) #1 0x00007fea7c676a79 __libc_start_main_impl (/usr/libexec/catatonit/catatonit + 0x4a79) #2 0x00007fea7c6744e5 _start (/usr/libexec/catatonit/catatonit + 0x24e5) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 31700 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 31700 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:00 managed-node2 systemd[1]: systemd-coredump@10-31705-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@10-31705-0.service has successfully entered the 'dead' state. Mar 07 11:46:00 managed-node2 python3.12[31846]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Mar 07 11:46:00 managed-node2 python3.12[32002]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:46:01 managed-node2 python3.12[32159]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:02 managed-node2 python3.12[32315]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:02 managed-node2 python3.12[32470]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:03 managed-node2 podman[32648]: 2026-03-07 11:46:03.898635121 -0500 EST m=+0.392627575 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:04 managed-node2 python3.12[32839]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:46:04 managed-node2 python3.12[32994]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:05 managed-node2 python3.12[33149]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:46:05 managed-node2 python3.12[33274]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901964.9849694-14845-101719091169581/.source.yml _original_basename=.e466nazg follow=False checksum=3ff675c4424d0c6a65148416b04367244e5cae81 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:05 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.010232203 -0500 EST m=+0.016945230 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:46:06 managed-node2 systemd[1]: Created slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice - cgroup machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice. ░░ Subject: A start job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished successfully. ░░ ░░ The job identifier is 2171. 
Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.051819164 -0500 EST m=+0.058532100 container create ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.055915225 -0500 EST m=+0.062628141 pod create 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.081531085 -0500 EST m=+0.088244108 container create 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1039] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.05840266 -0500 EST m=+0.065115702 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:06 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:46:06 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:06 managed-node2 (udev-worker)[33449]: Network interface NamePolicy= disabled on kernel command line. Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1191] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1201] device (veth0): carrier: link connected Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1203] device (podman1): carrier: link connected Mar 07 11:46:06 managed-node2 (udev-worker)[33448]: Network interface NamePolicy= disabled on kernel command line. 
Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1470] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1475] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1481] device (podman1): Activation: starting connection 'podman1' (7024fc22-fe75-4cea-afb7-75608193f035) Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1484] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1486] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1488] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1531] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2177. Mar 07 11:46:06 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2177. Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1973] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1976] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1982] device (podman1): Activation: successful, device activated. Mar 07 11:46:06 managed-node2 systemd[1]: Started run-p33482-i33483.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p33482-i33483.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p33482-i33483.scope has finished successfully. ░░ ░░ The job identifier is 2256. Mar 07 11:46:06 managed-node2 aardvark-dns[33482]: starting aardvark on a child with pid 33489 Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Successfully parsed config Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v6 ip {} Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Mar 07 11:46:06 managed-node2 systemd[1]: Started libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope. 
░░ Subject: A start job for unit libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully. ░░ ░░ The job identifier is 2262. Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : terminal_ctrl_fd: 12 Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : winsz read side: 16, winsz write side: 17 Mar 07 11:46:06 managed-node2 systemd[1]: Started libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope - libcrun container. ░░ Subject: A start job for unit libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully. ░░ ░░ The job identifier is 2269. Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : container PID: 33495 Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.278669784 -0500 EST m=+0.285382817 container init ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.281608185 -0500 EST m=+0.288321275 container start ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:06 managed-node2 kernel: catatonit[33495]: segfault at a9b80 ip 00007f4ad6a29dbb sp 00007ffcbefcdc50 error 4 in catatonit[4dbb,7f4ad6a26000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:06 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:06 managed-node2 systemd-coredump[33497]: Process 33495 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:06 managed-node2 systemd[1]: Started systemd-coredump@11-33497-0.service - Process Core Dump (PID 33497/UID 0). ░░ Subject: A start job for unit systemd-coredump@11-33497-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@11-33497-0.service has finished successfully. ░░ ░░ The job identifier is 2276. Mar 07 11:46:06 managed-node2 systemd[1]: Started libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope. ░░ Subject: A start job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully. ░░ ░░ The job identifier is 2285. 
Mar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/11/attach} Mar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : terminal_ctrl_fd: 11 Mar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : winsz read side: 15, winsz write side: 16 Mar 07 11:46:06 managed-node2 systemd[1]: Started libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope - libcrun container. ░░ Subject: A start job for unit libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully. ░░ ░░ The job identifier is 2292. Mar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : container PID: 33503 Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.352403787 -0500 EST m=+0.359116948 container init 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.357465195 -0500 EST m=+0.364178282 container start 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.361993922 -0500 EST m=+0.368706973 pod start 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Mar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 Container: 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 Mar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2026-03-07T11:46:06-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2026-03-07T11:46:06-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2026-03-07T11:46:06-05:00" level=info msg="Setting parallel job count to 7" time="2026-03-07T11:46:06-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2026-03-07T11:46:06-05:00" level=info msg="Using sqlite as database backend" time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph driver overlay" time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Using run root /run/containers/storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Using static dir 
/var/lib/containers/storage/libpod" time="2026-03-07T11:46:06-05:00" level=debug msg="Using tmp dir /run/libpod" time="2026-03-07T11:46:06-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2026-03-07T11:46:06-05:00" level=debug msg="Using transient store: false" time="2026-03-07T11:46:06-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2026-03-07T11:46:06-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2026-03-07T11:46:06-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2026-03-07T11:46:06-05:00" level=debug msg="Initializing event backend journald" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d bridge podman1 2026-03-07 11:44:18.828483768 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded 2 networks" time="2026-03-07T11:46:06-05:00" level=debug msg="Pod using bridge network mode" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent 
machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string" time="2026-03-07T11:46:06-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:46:06-05:00" level=debug msg="setting container name 09b7f33e3afd-infra" time="2026-03-07T11:46:06-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Allocated lock 1 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container \"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\" has work directory \"/var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\" has run directory \"/run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:46:06-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2026-03-07T11:46:06-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2026-03-07T11:46:06-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:46:06-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2026-03-07T11:46:06-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:46:06-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2026-03-07T11:46:06-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:46:06-05:00" level=debug msg="adding container to pod httpd2" time="2026-03-07T11:46:06-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2026-03-07T11:46:06-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:46:06-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /proc" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /dev" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /dev/pts" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /dev/mqueue" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /sys" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2026-03-07T11:46:06-05:00" level=debug msg="Allocated lock 2 for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\" has work directory \"/var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\" has run directory \"/run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Strongconnecting node ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="Pushed ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b onto stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Finishing node ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b. Popped ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b off stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Strongconnecting node 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="Pushed 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 onto stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Finishing node 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08. Popped 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 off stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Made network namespace at /run/netns/netns-57a5144e-40ac-4a85-01ac-9226ddb3e6f8 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="Created root filesystem for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b at /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/rootfs/merge" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::bridge] Using mtu 9001 from default route interface for the network [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/ip_forward to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/route_localnet to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink_route] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_52ab27bf_10_89_0_0_nm24 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "ee:98:79:da:ba:e0", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2026-03-07T11:46:06-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:46:06-05:00" level=debug msg="Setting Cgroups for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b to machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice:libpod:ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:46:06-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/rootfs/merge\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Created OCI spec for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b at /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/config.json" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="Got pod cgroup as 
machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:46:06-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b -u ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata -p /run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/pidfile -n 09b7f33e3afd-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b]" time="2026-03-07T11:46:06-05:00" level=info msg="Running conmon under slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice and unitName libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope" time="2026-03-07T11:46:06-05:00" level=debug msg="Received: 33495" time="2026-03-07T11:46:06-05:00" level=info msg="Got Conmon PID as 33493" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b in OCI runtime" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2026-03-07T11:46:06-05:00" level=debug msg="Starting container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b with command [/catatonit -P]" time="2026-03-07T11:46:06-05:00" level=debug msg="Started container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="overlay: 
mount_data=lowerdir=/var/lib/containers/storage/overlay/l/MVNQJ7CO6BHDSUCUPVG3N5YCHU,upperdir=/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/diff,workdir=/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c91,c172\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Mounted container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\" at \"/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/merged\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Created root filesystem for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 at /var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/merged" time="2026-03-07T11:46:06-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:46:06-05:00" level=debug msg="Setting Cgroups for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 to machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice:libpod:9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:46:06-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2026-03-07T11:46:06-05:00" level=debug msg="Created OCI spec for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 at /var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/config.json" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:46:06-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 -u 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata -p /run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg 
--runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08]" time="2026-03-07T11:46:06-05:00" level=info msg="Running conmon under slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice and unitName libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope" time="2026-03-07T11:46:06-05:00" level=debug msg="Received: 33503" time="2026-03-07T11:46:06-05:00" level=info msg="Got Conmon PID as 33500" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 in OCI runtime" time="2026-03-07T11:46:06-05:00" level=debug msg="Starting container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 with command [/bin/busybox-extras httpd -f -p 80]" time="2026-03-07T11:46:06-05:00" level=debug msg="Started container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2026-03-07T11:46:06-05:00" level=debug msg="Shutting down engines" time="2026-03-07T11:46:06-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=33436 Mar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Mar 07 11:46:06 managed-node2 systemd-coredump[33499]: Process 33495 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f4ad6a29dbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 33495 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 33495 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:06 managed-node2 systemd[1]: libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has successfully entered the 'dead' state. 
Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : container 33495 exited with status 139 Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice/libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope/container/memory.events Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : Cgroup appears to have been removed, stopping OOM monitoring Mar 07 11:46:06 managed-node2 systemd[1]: systemd-coredump@11-33497-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@11-33497-0.service has successfully entered the 'dead' state. Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b)" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="Setting parallel job count to 7" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="Using sqlite as database backend" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph driver overlay" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using run root /run/containers/storage" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using tmp dir /run/libpod" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using transient store: false" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cached 
value indicated that overlay is supported" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that metacopy is being used" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Initializing event backend journald" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Mar 07 11:46:06 managed-node2 podman[33506]: 2026-03-07 11:46:06.442858751 -0500 EST m=+0.031283817 container died ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra) Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cleaning up container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: 
time="2026-03-07T11:46:06-05:00" level=debug msg="Tearing down network namespace at /run/netns/netns-57a5144e-40ac-4a85-01ac-9226ddb3e6f8 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d bridge podman1 2026-03-07 11:44:18.828483768 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded 2 networks" Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Received SIGHUP Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Successfully parsed config Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v4 ip {} Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v6 ip {} Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: No configuration found stopping the sever Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Tearing down..\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [INFO netavark::firewall] Using nftables firewall driver\n" Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:06 managed-node2 systemd[1]: run-p33482-i33483.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p33482-i33483.scope has successfully entered the 'dead' state. 
Mar 07 11:46:06 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:46:06 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [INFO netavark::network::bridge] removing bridge podman1\n" Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.4783] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"INPUT\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\"tcp\")), Element(String(\"udp\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"th\", field: \"dport\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"daddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \"state\", family: None, dir: None })), right: List([String(\"established\"), String(\"related\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"POSTROUTING\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \"nv_52ab27bf_10_89_0_0_nm24\" })], handle: Some(26), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 4 rules\n[DEBUG netavark::firewall::nft] Found chain nv_52ab27bf_10_89_0_0_nm24\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::firewall::firewalld] Removing firewalld rules for IPs 10.89.0.0/24\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"NETAVARK-ISOLATION-3\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\"podman1\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain 
nv_52ab27bf_10_89_0_0_nm24_dnat\n[DEBUG netavark::firewall::nft] Found chain nv_52ab27bf_10_89_0_0_nm24_dnat\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Teardown complete\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully cleaned up container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" Mar 07 11:46:06 managed-node2 podman[33506]: 2026-03-07 11:46:06.54231054 -0500 EST m=+0.130735647 container cleanup ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b)" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Shutting down engines" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=33506 Mar 07 11:46:06 managed-node2 systemd[1]: libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has successfully entered the 'dead' state. Mar 07 11:46:06 managed-node2 python3.12[33684]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Mar 07 11:46:06 managed-node2 systemd[1]: Reload requested from client PID 33685 ('systemctl') (unit session-7.scope)... Mar 07 11:46:06 managed-node2 systemd[1]: Reloading... Mar 07 11:46:07 managed-node2 systemd-rc-local-generator[33727]: /etc/rc.d/rc.local is not marked executable, skipping. Mar 07 11:46:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-rootfs-merge.mount has successfully entered the 'dead' state. Mar 07 11:46:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-userdata-shm.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-userdata-shm.mount has successfully entered the 'dead' state. Mar 07 11:46:07 managed-node2 systemd[1]: run-netns-netns\x2d57a5144e\x2d40ac\x2d4a85\x2d01ac\x2d9226ddb3e6f8.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d57a5144e\x2d40ac\x2d4a85\x2d01ac\x2d9226ddb3e6f8.mount has successfully entered the 'dead' state. Mar 07 11:46:07 managed-node2 systemd[1]: Reloading finished in 222 ms. Mar 07 11:46:07 managed-node2 python3.12[33906]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Mar 07 11:46:07 managed-node2 systemd[1]: Reload requested from client PID 33909 ('systemctl') (unit session-7.scope)... Mar 07 11:46:07 managed-node2 systemd[1]: Reloading... Mar 07 11:46:07 managed-node2 systemd-rc-local-generator[33960]: /etc/rc.d/rc.local is not marked executable, skipping. Mar 07 11:46:07 managed-node2 systemd[1]: Reloading finished in 214 ms. Mar 07 11:46:08 managed-node2 python3.12[34130]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Mar 07 11:46:08 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2377. Mar 07 11:46:08 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2299. Mar 07 11:46:08 managed-node2 podman[34134]: 2026-03-07 11:46:08.575544958 -0500 EST m=+0.022390462 pod stop 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:10 managed-node2 podman[31698]: time="2026-03-07T11:46:10-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:10 managed-node2 systemd[29271]: Removed slice user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice - cgroup user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 133 and the job result is done. 
Mar 07 11:46:10 managed-node2 podman[31698]: Pods stopped: Mar 07 11:46:10 managed-node2 podman[31698]: 02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a Mar 07 11:46:10 managed-node2 podman[31698]: Pods removed: Mar 07 11:46:10 managed-node2 podman[31698]: 02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a Mar 07 11:46:10 managed-node2 podman[31698]: Secrets removed: Mar 07 11:46:10 managed-node2 podman[31698]: Volumes removed: Mar 07 11:46:10 managed-node2 systemd[29271]: podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service: Consumed 753ms CPU time, 80.9M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Mar 07 11:46:16 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Mar 07 11:46:18 managed-node2 podman[34134]: time="2026-03-07T11:46:18-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:18 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : container 33503 exited with status 137 Mar 07 11:46:18 managed-node2 systemd[1]: libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has successfully entered the 'dead' state. 
Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.605105738 -0500 EST m=+10.051951370 container died 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08)" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Setting parallel job count to 7" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Using sqlite as database backend" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using graph driver overlay" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using run root /run/containers/storage" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using tmp dir /run/libpod" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using transient store: false" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Cached value indicated that metacopy is being used" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Not using native 
diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Initializing event backend journald" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Mar 07 11:46:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054-merged.mount has successfully entered the 'dead' state. 
Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.639159774 -0500 EST m=+10.086005199 container cleanup 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Received shutdown signal \"terminated\", terminating!" PID=34157 Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Invoking shutdown handler \"libpod\"" PID=34157 Mar 07 11:46:18 managed-node2 systemd[1]: Stopping libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope... ░░ Subject: A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has begun execution. ░░ ░░ The job identifier is 2385. Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08)" Mar 07 11:46:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Completed shutdown handler \"libpod\", duration 0s" PID=34157 Mar 07 11:46:18 managed-node2 systemd[1]: libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has successfully entered the 'dead' state. Mar 07 11:46:18 managed-node2 systemd[1]: Stopped libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope. ░░ Subject: A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished. ░░ ░░ The job identifier is 2385 and the job result is done. Mar 07 11:46:18 managed-node2 systemd[1]: Removed slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice - cgroup machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice. ░░ Subject: A stop job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished. ░░ ░░ The job identifier is 2384 and the job result is done. Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.687385492 -0500 EST m=+10.134230912 pod stop 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.7198332 -0500 EST m=+10.166678633 container remove 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.74111814 -0500 EST m=+10.187963572 container remove ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.748131526 -0500 EST m=+10.194976927 pod remove 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:18 managed-node2 podman[34134]: Pods stopped: Mar 07 11:46:18 managed-node2 podman[34134]: 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 Mar 07 11:46:18 managed-node2 podman[34134]: Pods removed: Mar 07 11:46:18 managed-node2 podman[34134]: 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 Mar 07 11:46:18 managed-node2 podman[34134]: Secrets removed: Mar 07 11:46:18 managed-node2 podman[34134]: Volumes removed: Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.7485712 -0500 EST m=+10.195416609 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, 
type=bridge) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.765571666 -0500 EST m=+10.212417099 container create f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice - cgroup machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice. ░░ Subject: A start job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished successfully. ░░ ░░ The job identifier is 2388. Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.805707536 -0500 EST m=+10.252552936 container create 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.809765847 -0500 EST m=+10.256611247 pod create 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.811528719 -0500 EST m=+10.258374297 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.833756495 -0500 EST m=+10.280601983 container create 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.834103154 -0500 EST m=+10.280948599 container restart f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 systemd[1]: Started libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope - libcrun container. ░░ Subject: A start job for unit libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope has finished successfully. ░░ ░░ The job identifier is 2394. 
Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.897730576 -0500 EST m=+10.344576055 container init f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.900084574 -0500 EST m=+10.346930106 container start f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 kernel: catatonit[34171]: segfault at a9b80 ip 00007ff9206acdbb sp 00007ffc502fe200 error 4 in catatonit[4dbb,7ff9206a9000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:46:18 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:18 managed-node2 systemd-coredump[34173]: Process 34171 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:18 managed-node2 systemd[1]: Started systemd-coredump@12-34173-0.service - Process Core Dump (PID 34173/UID 0). ░░ Subject: A start job for unit systemd-coredump@12-34173-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@12-34173-0.service has finished successfully. ░░ ░░ The job identifier is 2400. Mar 07 11:46:18 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:46:18 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9395] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:18 managed-node2 (udev-worker)[34175]: Network interface NamePolicy= disabled on kernel command line. Mar 07 11:46:18 managed-node2 (udev-worker)[34176]: Network interface NamePolicy= disabled on kernel command line. 
Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9459] device (podman1): carrier: link connected Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9484] device (veth0): carrier: link connected Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9487] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9699] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9734] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9762] device (podman1): Activation: starting connection 'podman1' (a476fb3c-4953-4a99-8c37-c91c928220c1) Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9763] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9766] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9767] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9770] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Mar 07 11:46:18 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2409. Mar 07 11:46:19 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2409. Mar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.0167] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Mar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.0175] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Mar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.0184] device (podman1): Activation: successful, device activated. Mar 07 11:46:19 managed-node2 systemd[1]: Started run-p34216-i34217.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p34216-i34217.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p34216-i34217.scope has finished successfully. ░░ ░░ The job identifier is 2488. Mar 07 11:46:19 managed-node2 systemd-coredump[34177]: Process 34171 (catatonit) of user 0 dumped core. 
Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007ff9206acdbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 34171 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 34171 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:19 managed-node2 systemd[1]: libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 conmon[34169]: conmon f021e1ac269371a1a5c6 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope/container/memory.events Mar 07 11:46:19 managed-node2 systemd[1]: systemd-coredump@12-34173-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@12-34173-0.service has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 podman[34226]: 2026-03-07 11:46:19.118293703 -0500 EST m=+0.022854670 container died f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 systemd[1]: Started libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope - libcrun container. ░░ Subject: A start job for unit libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope has finished successfully. ░░ ░░ The job identifier is 2494. 
Mar 07 11:46:19 managed-node2 podman[34226]: 2026-03-07 11:46:19.144694335 -0500 EST m=+0.049255264 container cleanup f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.149467711 -0500 EST m=+10.596313208 container init 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.1518268 -0500 EST m=+10.598672204 container start 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 kernel: catatonit[34227]: segfault at a9b80 ip 00007f45bdca9dbb sp 00007fff72636820 error 4 in catatonit[4dbb,7f45bdca6000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:19 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:19 managed-node2 systemd-coredump[34240]: Process 34227 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:19 managed-node2 systemd[1]: Started systemd-coredump@13-34240-0.service - Process Core Dump (PID 34240/UID 0). ░░ Subject: A start job for unit systemd-coredump@13-34240-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@13-34240-0.service has finished successfully. ░░ ░░ The job identifier is 2501. Mar 07 11:46:19 managed-node2 systemd[1]: Started libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope - libcrun container. ░░ Subject: A start job for unit libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope has finished successfully. ░░ ░░ The job identifier is 2510. 
Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.197461095 -0500 EST m=+10.644306639 container init 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.200754871 -0500 EST m=+10.647600358 container start 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test) Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.205111382 -0500 EST m=+10.651956899 pod start 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2) Mar 07 11:46:19 managed-node2 podman[34134]: Pod: Mar 07 11:46:19 managed-node2 podman[34134]: 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 Mar 07 11:46:19 managed-node2 podman[34134]: Container: Mar 07 11:46:19 managed-node2 podman[34134]: 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 Mar 07 11:46:19 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Mar 07 11:46:19 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2299. Mar 07 11:46:19 managed-node2 podman[34248]: 2026-03-07 11:46:19.270404055 -0500 EST m=+0.035233803 pod stop 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2) Mar 07 11:46:19 managed-node2 systemd-coredump[34241]: Process 34227 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f45bdca9dbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 34227 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 34227 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:19 managed-node2 systemd[1]: libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 systemd[1]: systemd-coredump@13-34240-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@13-34240-0.service has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 podman[34283]: 2026-03-07 11:46:19.327174521 -0500 EST m=+0.016512016 container died 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 systemd[1]: run-p34216-i34217.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p34216-i34217.scope has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:19 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:46:19 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:46:19 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.3664] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Mar 07 11:46:19 managed-node2 podman[34283]: 2026-03-07 11:46:19.426367765 -0500 EST m=+0.115705152 container cleanup 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-rootfs-merge.mount has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-userdata-shm.mount has successfully entered the 'dead' state. 
Mar 07 11:46:20 managed-node2 python3.12[34439]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:46:21 managed-node2 python3.12[34596]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:21 managed-node2 python3.12[34752]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:22 managed-node2 python3.12[34907]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:23 managed-node2 podman[35085]: 2026-03-07 11:46:23.121556991 -0500 EST m=+0.299999620 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:23 managed-node2 python3.12[35275]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:46:24 managed-node2 python3.12[35430]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:24 managed-node2 python3.12[35585]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:46:24 managed-node2 python3.12[35710]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901984.2287433-15536-81376403194330/.source.yml _original_basename=.8zbijrin follow=False checksum=4ea4a304b347a6aaa397596e57cb6db94ea16b46 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:25 managed-node2 python3.12[35865]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.275495475 -0500 EST m=+0.013172190 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:46:25 managed-node2 systemd[1]: Created slice machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice - cgroup machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice. ░░ Subject: A start job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished successfully. ░░ ░░ The job identifier is 2517. Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.311817507 -0500 EST m=+0.049494315 container create 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.316459547 -0500 EST m=+0.054136253 pod create 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.342690191 -0500 EST m=+0.080367000 container create fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3550] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/7) Mar 07 11:46:25 managed-node2 (udev-worker)[35883]: Network interface NamePolicy= disabled on kernel command line. Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.318601722 -0500 EST m=+0.056278508 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:25 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:46:25 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3822] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:25 managed-node2 (udev-worker)[35885]: Network interface NamePolicy= disabled on kernel command line. 
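Editor's note: the `systemd-escape --template` invocation logged above is how the role maps the kube YAML path onto an instance name for the podman-kube@.service template; the escaped name it produces is the same one that appears in the unit start/stop messages throughout this log. A minimal sketch of reproducing that mapping by hand (path and template name taken from the log):

# Derive the systemd unit instance name for a kube YAML path
systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml
# should print the unit name seen in this log:
# podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service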
Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3879] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3889] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3899] device (podman1): Activation: starting connection 'podman1' (6d8bf7d8-ae00-4628-b267-3ec15c9e992b) Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3904] device (veth0): carrier: link connected Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3907] device (podman1): carrier: link connected Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3909] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3915] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3918] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3922] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3944] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3948] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3952] device (podman1): Activation: successful, device activated. Mar 07 11:46:25 managed-node2 systemd[1]: Started run-p35920-i35921.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p35920-i35921.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35920-i35921.scope has finished successfully. ░░ ░░ The job identifier is 2523. Mar 07 11:46:25 managed-node2 systemd[1]: Started libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope. ░░ Subject: A start job for unit libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully. ░░ ░░ The job identifier is 2529. Mar 07 11:46:25 managed-node2 systemd[1]: Started libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope - libcrun container. ░░ Subject: A start job for unit libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully. ░░ ░░ The job identifier is 2536. 
Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.521896266 -0500 EST m=+0.259573093 container init 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.524546913 -0500 EST m=+0.262223671 container start 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:25 managed-node2 kernel: catatonit[35928]: segfault at a9b80 ip 00007f6f72c16dbb sp 00007ffefa9d1c40 error 4 in catatonit[4dbb,7f6f72c13000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:46:25 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:25 managed-node2 systemd-coredump[35930]: Process 35928 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:25 managed-node2 systemd[1]: Started systemd-coredump@14-35930-0.service - Process Core Dump (PID 35930/UID 0). ░░ Subject: A start job for unit systemd-coredump@14-35930-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@14-35930-0.service has finished successfully. ░░ ░░ The job identifier is 2543. Mar 07 11:46:25 managed-node2 systemd[1]: Started libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope. ░░ Subject: A start job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully. ░░ ░░ The job identifier is 2552. Mar 07 11:46:25 managed-node2 systemd[1]: Started libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope - libcrun container. ░░ Subject: A start job for unit libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully. ░░ ░░ The job identifier is 2559. 
Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.585157387 -0500 EST m=+0.322834314 container init fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.589721993 -0500 EST m=+0.327398858 container start fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.594062087 -0500 EST m=+0.331738925 pod start 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3) Mar 07 11:46:25 managed-node2 systemd-coredump[35931]: Process 35928 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f6f72c16dbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 35928 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 35928 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:25 managed-node2 systemd[1]: libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has successfully entered the 'dead' state. Mar 07 11:46:25 managed-node2 systemd[1]: systemd-coredump@14-35930-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@14-35930-0.service has successfully entered the 'dead' state. Mar 07 11:46:25 managed-node2 podman[35941]: 2026-03-07 11:46:25.677668355 -0500 EST m=+0.029940803 container died 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra) Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:25 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:46:25 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:25 managed-node2 systemd[1]: run-p35920-i35921.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35920-i35921.scope has successfully entered the 'dead' state. Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.7169] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Mar 07 11:46:25 managed-node2 systemd[1]: run-netns-netns\x2d642376bf\x2d1b5a\x2dbb61\x2d6a3b\x2dfe70bdea2608.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d642376bf\x2d1b5a\x2dbb61\x2d6a3b\x2dfe70bdea2608.mount has successfully entered the 'dead' state. Mar 07 11:46:25 managed-node2 podman[35941]: 2026-03-07 11:46:25.791383924 -0500 EST m=+0.143656388 container cleanup 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:25 managed-node2 systemd[1]: libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has successfully entered the 'dead' state. Mar 07 11:46:26 managed-node2 python3.12[36118]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Mar 07 11:46:26 managed-node2 systemd[1]: Reload requested from client PID 36119 ('systemctl') (unit session-7.scope)... Mar 07 11:46:26 managed-node2 systemd[1]: Reloading... Mar 07 11:46:26 managed-node2 systemd-rc-local-generator[36161]: /etc/rc.d/rc.local is not marked executable, skipping. Mar 07 11:46:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-userdata-shm.mount has successfully entered the 'dead' state. Mar 07 11:46:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-rootfs-merge.mount has successfully entered the 'dead' state. Mar 07 11:46:26 managed-node2 systemd[1]: Reloading finished in 222 ms. Mar 07 11:46:27 managed-node2 python3.12[36340]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Mar 07 11:46:27 managed-node2 systemd[1]: Reload requested from client PID 36343 ('systemctl') (unit session-7.scope)... Mar 07 11:46:27 managed-node2 systemd[1]: Reloading... Mar 07 11:46:27 managed-node2 systemd-rc-local-generator[36385]: /etc/rc.d/rc.local is not marked executable, skipping. Mar 07 11:46:27 managed-node2 systemd[1]: Reloading finished in 215 ms. Mar 07 11:46:27 managed-node2 python3.12[36564]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Mar 07 11:46:27 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... 
░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2566. Mar 07 11:46:27 managed-node2 podman[36568]: 2026-03-07 11:46:27.898059361 -0500 EST m=+0.023361638 pod stop 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3) Mar 07 11:46:29 managed-node2 podman[34248]: time="2026-03-07T11:46:29-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:29 managed-node2 systemd[1]: libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope has successfully entered the 'dead' state. Mar 07 11:46:29 managed-node2 conmon[34243]: conmon 1d68ffb6e0e9d84fa87d : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice/libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope/container/memory.events Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.301508379 -0500 EST m=+10.066338217 container died 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:29 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0c3bc18cb09098ff9f2d69e62d54abe819e567e28ca84575505d06b7f7092c88-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-0c3bc18cb09098ff9f2d69e62d54abe819e567e28ca84575505d06b7f7092c88-merged.mount has successfully entered the 'dead' state. Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.336665522 -0500 EST m=+10.101495209 container cleanup 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:29 managed-node2 systemd[1]: Removed slice machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice - cgroup machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice. ░░ Subject: A stop job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished. 
░░ ░░ The job identifier is 2651 and the job result is done. Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.366179073 -0500 EST m=+10.131008702 container remove 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.387294034 -0500 EST m=+10.152123658 container remove 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.394892423 -0500 EST m=+10.159722015 pod remove 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2) Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.413822806 -0500 EST m=+10.178652430 container remove f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:29 managed-node2 podman[34248]: time="2026-03-07T11:46:29-05:00" level=error msg="Checking whether service of container f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a can be stopped: no container with ID f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a found in database: no such container" Mar 07 11:46:29 managed-node2 podman[34248]: Pods stopped: Mar 07 11:46:29 managed-node2 podman[34248]: 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 Mar 07 11:46:29 managed-node2 podman[34248]: Pods removed: Mar 07 11:46:29 managed-node2 podman[34248]: 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 Mar 07 11:46:29 managed-node2 podman[34248]: Secrets removed: Mar 07 11:46:29 managed-node2 podman[34248]: Volumes removed: Mar 07 11:46:29 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state. Mar 07 11:46:35 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2653. Mar 07 11:46:35 managed-node2 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Mar 07 11:46:35 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files. 
░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2653. Mar 07 11:46:35 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 podman[36568]: time="2026-03-07T11:46:37-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:37 managed-node2 systemd[1]: libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 podman[36568]: 2026-03-07 11:46:37.926712157 -0500 EST m=+10.052014514 container died fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:46:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ee6d4c1dab7f57321763e3e557cd90f2d6b0b9b7aeaf3ef8eab8ca49efa608d6-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-ee6d4c1dab7f57321763e3e557cd90f2d6b0b9b7aeaf3ef8eab8ca49efa608d6-merged.mount has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 podman[36568]: 2026-03-07 11:46:37.960570271 -0500 EST m=+10.085872435 container cleanup fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Mar 07 11:46:37 managed-node2 systemd[1]: Stopping libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope... ░░ Subject: A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has begun execution. ░░ ░░ The job identifier is 2732. Mar 07 11:46:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 systemd[1]: libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 systemd[1]: Stopped libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope. ░░ Subject: A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished. ░░ ░░ The job identifier is 2732 and the job result is done. Mar 07 11:46:38 managed-node2 systemd[1]: Removed slice machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice - cgroup machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice. ░░ Subject: A stop job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished. ░░ ░░ The job identifier is 2731 and the job result is done. Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.035367566 -0500 EST m=+10.160669765 container remove fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.056136818 -0500 EST m=+10.181439022 container remove 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.063189022 -0500 EST m=+10.188491194 pod remove 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3) Mar 07 11:46:38 managed-node2 podman[36568]: Pods stopped: Mar 07 11:46:38 managed-node2 podman[36568]: 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d Mar 07 11:46:38 managed-node2 podman[36568]: Pods removed: Mar 07 11:46:38 managed-node2 podman[36568]: 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d Mar 07 11:46:38 managed-node2 podman[36568]: Secrets removed: Mar 07 11:46:38 managed-node2 podman[36568]: Volumes removed: Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.063738172 -0500 EST m=+10.189040347 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.080611451 -0500 EST m=+10.205913648 container create 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 systemd[1]: Created slice machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice - cgroup 
machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice. ░░ Subject: A start job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished successfully. ░░ ░░ The job identifier is 2734. Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.119628454 -0500 EST m=+10.244930639 container create f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.12361059 -0500 EST m=+10.248912760 pod create 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.147717736 -0500 EST m=+10.273020009 container create e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.148072438 -0500 EST m=+10.273374646 container restart 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.125319335 -0500 EST m=+10.250621668 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:38 managed-node2 systemd[1]: Started libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope - libcrun container. ░░ Subject: A start job for unit libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope has finished successfully. ░░ ░░ The job identifier is 2740. 
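Editor's note: the b3ce3ab91ba6-service container created here is the "service container" that podman kube play uses to tie the pod's lifecycle to the systemd unit. The exact commands the shipped template wraps can be checked on the node; a hedged sketch (the unit name is real, the comment about its contents is an assumption about typical packaging):

# Show the template unit that the podman-kube@... instances are built from
systemctl cat podman-kube@.service
# On typical podman packages this wraps something like
# 'podman kube play --replace --service-container=true %I' for start and
# 'podman kube down %I' for stop, which would match the
# "Pods stopped / Pods removed" summaries printed when these units stop.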
Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.187397166 -0500 EST m=+10.312699428 container init 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.189764724 -0500 EST m=+10.315067060 container start 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 kernel: catatonit[36613]: segfault at a9b80 ip 00007f2b0a99fdbb sp 00007ffeeae7b9e0 error 4 in catatonit[4dbb,7f2b0a99c000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:38 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:38 managed-node2 systemd-coredump[36615]: Process 36613 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:38 managed-node2 systemd[1]: Started systemd-coredump@15-36615-0.service - Process Core Dump (PID 36615/UID 0). ░░ Subject: A start job for unit systemd-coredump@15-36615-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@15-36615-0.service has finished successfully. ░░ ░░ The job identifier is 2746. Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:38 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:46:38 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2247] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:38 managed-node2 (udev-worker)[36618]: Network interface NamePolicy= disabled on kernel command line. Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2297] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2309] device (veth0): carrier: link connected Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2312] device (podman1): carrier: link connected Mar 07 11:46:38 managed-node2 (udev-worker)[36619]: Network interface NamePolicy= disabled on kernel command line. 
Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2590] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2595] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2603] device (podman1): Activation: starting connection 'podman1' (2d400bb2-5548-4489-9134-38d8ab37ffca) Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2605] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2608] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2624] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2626] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2755. Mar 07 11:46:38 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2755. Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2978] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2981] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2986] device (podman1): Activation: successful, device activated. Mar 07 11:46:38 managed-node2 systemd[1]: Started run-p36659-i36660.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p36659-i36660.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p36659-i36660.scope has finished successfully. ░░ ░░ The job identifier is 2834. Mar 07 11:46:38 managed-node2 systemd-coredump[36617]: Process 36613 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f2b0a99fdbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 36613 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 36613 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. 
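Editor's note: catatonit (the container init) segfaults and dumps core repeatedly in this run (PIDs 34227, 35928, 36613, 36666 above), and the cores are captured by systemd-coredump. They can be listed and examined after the fact with standard coredumpctl commands; a minimal sketch (the PID is taken from the log):

# List captured catatonit core dumps
coredumpctl list catatonit
# Show metadata and the recorded stack trace for one crash
coredumpctl info 36613
# Open the core in gdb for deeper inspection (needs gdb and debuginfo installed)
coredumpctl debug 36613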
Mar 07 11:46:38 managed-node2 systemd[1]: Started libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope - libcrun container. ░░ Subject: A start job for unit libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope has finished successfully. ░░ ░░ The job identifier is 2840. Mar 07 11:46:38 managed-node2 systemd[1]: libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 systemd[1]: systemd-coredump@15-36615-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@15-36615-0.service has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.385243424 -0500 EST m=+10.510545838 container init f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.389173233 -0500 EST m=+10.514475479 container start f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 kernel: catatonit[36666]: segfault at a9b80 ip 00007fbf485bcdbb sp 00007ffec6bbd630 error 4 in catatonit[4dbb,7fbf485b9000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:38 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:38 managed-node2 systemd-coredump[36678]: Process 36666 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:38 managed-node2 systemd[1]: Started systemd-coredump@16-36678-0.service - Process Core Dump (PID 36678/UID 0). ░░ Subject: A start job for unit systemd-coredump@16-36678-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@16-36678-0.service has finished successfully. ░░ ░░ The job identifier is 2847. Mar 07 11:46:38 managed-node2 podman[36668]: 2026-03-07 11:46:38.417671038 -0500 EST m=+0.036061473 container died 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 systemd[1]: Started libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope - libcrun container. 
░░ Subject: A start job for unit libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope has finished successfully. ░░ ░░ The job identifier is 2856. Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.454475365 -0500 EST m=+10.579777839 container init e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.458255485 -0500 EST m=+10.583557732 container start e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36668]: 2026-03-07 11:46:38.460012504 -0500 EST m=+0.078402768 container cleanup 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.467228715 -0500 EST m=+10.592531020 pod start 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3) Mar 07 11:46:38 managed-node2 podman[36568]: Pod: Mar 07 11:46:38 managed-node2 podman[36568]: 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a Mar 07 11:46:38 managed-node2 podman[36568]: Container: Mar 07 11:46:38 managed-node2 podman[36568]: e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda Mar 07 11:46:38 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Mar 07 11:46:38 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2566. Mar 07 11:46:38 managed-node2 systemd-coredump[36679]: Process 36666 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007fbf485bcdbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 36666 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 36666 (catatonit) crashed and dumped core. 
░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:38 managed-node2 systemd[1]: libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 conmon[36664]: conmon f4619e7c96f87ca49266 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice/libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope/container/memory.events Mar 07 11:46:38 managed-node2 systemd[1]: systemd-coredump@16-36678-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@16-36678-0.service has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 podman[36689]: 2026-03-07 11:46:38.559767062 -0500 EST m=+0.051261217 pod stop 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3) Mar 07 11:46:38 managed-node2 podman[36704]: 2026-03-07 11:46:38.587832463 -0500 EST m=+0.021982125 container died f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 systemd[1]: run-p36659-i36660.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p36659-i36660.scope has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:38 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:46:38 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.6196] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Mar 07 11:46:38 managed-node2 podman[36704]: 2026-03-07 11:46:38.676650677 -0500 EST m=+0.110800341 container cleanup f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-rootfs-merge.mount has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-userdata-shm.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-userdata-shm.mount has successfully entered the 'dead' state. Mar 07 11:46:39 managed-node2 sudo[36930]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bhiauhbpazjvrfebfoyyszwwljeltpxs ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901998.8167956-16021-135055247896982/AnsiballZ_command.py' Mar 07 11:46:39 managed-node2 sudo[36930]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:46:39 managed-node2 python3.12[36933]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:39 managed-node2 kernel: catatonit[36946]: segfault at a9b80 ip 00007f0938c79dbb sp 00007ffd32edf6e0 error 4 in catatonit[4dbb,7f0938c76000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:39 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:39 managed-node2 systemd-coredump[36953]: Process 36946 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:39 managed-node2 systemd[1]: Started systemd-coredump@17-36953-0.service - Process Core Dump (PID 36953/UID 0). ░░ Subject: A start job for unit systemd-coredump@17-36953-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@17-36953-0.service has finished successfully. ░░ ░░ The job identifier is 2863. Mar 07 11:46:39 managed-node2 systemd[29271]: Started podman-36940.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 136. Mar 07 11:46:39 managed-node2 systemd[29271]: Started podman-pause-7a6a691e.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 140. Mar 07 11:46:39 managed-node2 systemd-coredump[36955]: Resource limits disable core dumping for process 36946 (catatonit). Mar 07 11:46:39 managed-node2 systemd-coredump[36955]: Process 36946 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 36946 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 36946 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:39 managed-node2 systemd[1]: systemd-coredump@17-36953-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@17-36953-0.service has successfully entered the 'dead' state. 
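Editor's note: the `podman pod inspect ... --format '{{.State}}'` invocations here and just below are the test's verification step; the Go template prints only the pod's State field. A minimal usage sketch (pod name taken from the log; "Running" is the expected value, not output copied from this run):

# Print just the state of the httpd3 pod (e.g. "Running")
podman pod inspect httpd3 --format '{{.State}}'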
Mar 07 11:46:39 managed-node2 sudo[36930]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:46:39 managed-node2 python3.12[37114]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:40 managed-node2 python3.12[37276]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:48 managed-node2 podman[36689]: time="2026-03-07T11:46:48-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:48 managed-node2 systemd[1]: libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.588635338 -0500 EST m=+10.080129492 container died e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:46:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay-8566ab6694c8721216d4c8d23c7b9a958cf4d626dd678590315232721a4623bc-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-8566ab6694c8721216d4c8d23c7b9a958cf4d626dd678590315232721a4623bc-merged.mount has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.624299419 -0500 EST m=+10.115793441 container cleanup e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:48 managed-node2 systemd[1]: Removed slice machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice - cgroup machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice. ░░ Subject: A stop job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished. ░░ ░░ The job identifier is 2872 and the job result is done. Mar 07 11:46:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.658377824 -0500 EST m=+10.149871873 container remove e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.679532751 -0500 EST m=+10.171026794 container remove f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.686508191 -0500 EST m=+10.178002210 pod remove 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3) Mar 07 11:46:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.714211879 -0500 EST m=+10.205706029 container remove 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:48 managed-node2 podman[36689]: Pods stopped: Mar 07 11:46:48 managed-node2 podman[36689]: 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a Mar 07 11:46:48 managed-node2 podman[36689]: Pods removed: Mar 07 11:46:48 managed-node2 podman[36689]: 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a Mar 07 11:46:48 managed-node2 podman[36689]: Secrets removed: Mar 07 11:46:48 managed-node2 podman[36689]: Volumes removed: Mar 07 11:46:48 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state. 
Mar 07 11:46:49 managed-node2 python3.12[37452]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Clean up storage.conf] *************************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:466 Saturday 07 March 2026 11:46:49 -0500 (0:00:00.451) 0:03:34.728 ******** ok: [managed-node2] => { "changed": false, "path": "/etc/containers/storage.conf", "state": "absent" } TASK [Clean up host directories] *********************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:473 Saturday 07 March 2026 11:46:49 -0500 (0:00:00.388) 0:03:35.117 ******** changed: [managed-node2] => { "changed": true, "path": "/tmp/lsr_od4netlk_podman", "state": "absent" } TASK [Remove kube file src] **************************************************** task path: /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:480 Saturday 07 March 2026 11:46:49 -0500 (0:00:00.372) 0:03:35.489 ******** changed: [managed-node2 -> localhost] => { "changed": true, "path": "/tmp/lsr_podman_6snohudj.yml", "state": "absent" } PLAY RECAP ********************************************************************* managed-node2 : ok=355 changed=42 unreachable=0 failed=1 skipped=578 rescued=2 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.17.14", "end_time": "2026-03-07T16:44:40.590112+00:00Z", "host": "managed-node2", "message": "Output: \nError=Trying to pull quay.io/linux-system-roles/this_is_a_bogus_image:latest...\nError: unable to copy from source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized\n", "start_time": "2026-03-07T16:44:39.932106+00:00Z", "task_name": "Update containers/pods", "task_path": "/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:63" }, { "ansible_version": "2.17.14", "delta": "0:00:00.125389", "end_time": "2026-03-07 11:46:39.284510", "host": "managed-node2", "loop_item": [ "httpd1", "podman_basic_user", 3001 ], "loop_label": "", "loop_var": "item", "message": "non-zero return code", "rc": 125, "start_time": "2026-03-07 11:46:39.159121", "stderr": "Error: no such pod httpd1", "task_name": "Check if pods are running", "task_path": "/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:302" }, { "ansible_version": "2.17.14", "delta": "0:00:00.026013", "end_time": "2026-03-07 11:46:39.672378", "host": "managed-node2", "loop_item": [ "httpd2", "root", 0 ], "loop_label": "", "loop_var": "item", "message": "non-zero return code", "rc": 125, "start_time": "2026-03-07 11:46:39.646365", "stderr": "Error: no such pod httpd2", "task_name": "Check if pods are running", "task_path": "/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:302" }, { "ansible_version": "2.17.14", "delta": "0:00:08.668131", "end_time": "2026-03-07 11:46:48.695788", "host": "managed-node2", "loop_item": [ "httpd3", "root", 0 ], "loop_label": "", 
"loop_var": "item", "message": "non-zero return code", "rc": 125, "start_time": "2026-03-07 11:46:40.027657", "stderr": "Error: no such pod httpd3", "task_name": "Check if pods are running", "task_path": "/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:302" }, { "ansible_version": "2.17.14", "delta": "0:00:00.035089", "end_time": "2026-03-07 11:46:49.106342", "host": "managed-node2", "message": "", "rc": 0, "start_time": "2026-03-07 11:46:49.071253", "stdout": "Mar 07 11:43:59 managed-node2 python3.12[13345]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:00 managed-node2 python3.12[13500]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:00 managed-node2 python3.12[13655]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:44:01 managed-node2 python3.12[13811]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:02 managed-node2 python3.12[13967]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nMar 07 11:44:03 managed-node2 python3.12[14124]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nMar 07 11:44:03 managed-node2 systemd[1]: Reload requested from client PID 14127 ('systemctl') (unit session-7.scope)...\nMar 07 11:44:03 managed-node2 systemd[1]: Reloading...\nMar 07 11:44:03 managed-node2 systemd-rc-local-generator[14179]: /etc/rc.d/rc.local is not marked executable, skipping.\nMar 07 11:44:03 managed-node2 systemd[1]: Reloading finished in 205 ms.\nMar 07 11:44:03 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon...\n\u2591\u2591 Subject: A start job for unit firewalld.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit firewalld.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1651.\nMar 07 11:44:04 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon.\n\u2591\u2591 Subject: A start job for unit firewalld.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit firewalld.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1651.\nMar 07 11:44:05 managed-node2 python3.12[14387]: 
ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nMar 07 11:44:05 managed-node2 python3.12[14542]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:06 managed-node2 python3.12[14697]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:06 managed-node2 python3.12[14852]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:07 managed-node2 python3.12[15008]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:08 managed-node2 python3.12[15164]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:08 managed-node2 dbus-broker-launch[739]: Noticed file-system modification, trigger reload.\n\u2591\u2591 Subject: A configuration directory was written to\n\u2591\u2591 Defined-By: dbus-broker\n\u2591\u2591 Support: https://groups.google.com/forum/#!forum/bus1-devel\n\u2591\u2591 \n\u2591\u2591 A write was detected to one of the directories containing D-Bus configuration\n\u2591\u2591 files, triggering a configuration reload.\n\u2591\u2591 \n\u2591\u2591 This functionality exists for backwards compatibility to pick up changes to\n\u2591\u2591 D-Bus configuration without an explicit reolad request. 
Typically when\n\u2591\u2591 installing or removing third-party software causes D-Bus configuration files\n\u2591\u2591 to be added or removed.\n\u2591\u2591 \n\u2591\u2591 It is worth noting that this may cause partial configuration to be loaded in\n\u2591\u2591 case dispatching this notification races with the writing of the configuration\n\u2591\u2591 files. However, a future notification will then cause the configuration to be\n\u2591\u2591 reladed again.\nMar 07 11:44:08 managed-node2 dbus-broker-launch[739]: Noticed file-system modification, trigger reload.\n\u2591\u2591 Subject: A configuration directory was written to\n\u2591\u2591 Defined-By: dbus-broker\n\u2591\u2591 Support: https://groups.google.com/forum/#!forum/bus1-devel\n\u2591\u2591 \n\u2591\u2591 A write was detected to one of the directories containing D-Bus configuration\n\u2591\u2591 files, triggering a configuration reload.\n\u2591\u2591 \n\u2591\u2591 This functionality exists for backwards compatibility to pick up changes to\n\u2591\u2591 D-Bus configuration without an explicit reolad request. Typically when\n\u2591\u2591 installing or removing third-party software causes D-Bus configuration files\n\u2591\u2591 to be added or removed.\n\u2591\u2591 \n\u2591\u2591 It is worth noting that this may cause partial configuration to be loaded in\n\u2591\u2591 case dispatching this notification races with the writing of the configuration\n\u2591\u2591 files. However, a future notification will then cause the configuration to be\n\u2591\u2591 reladed again.\nMar 07 11:44:08 managed-node2 systemd[1]: Started run-p15170-i15171.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update.\n\u2591\u2591 Subject: A start job for unit run-p15170-i15171.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit run-p15170-i15171.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1737.\nMar 07 11:44:08 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n\u2591\u2591 Subject: A start job for unit man-db-cache-update.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit man-db-cache-update.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1815.\nMar 07 11:44:09 managed-node2 python3.12[15332]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nMar 07 11:44:09 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit man-db-cache-update.service has successfully entered the 'dead' state.\nMar 07 11:44:09 managed-node2 systemd[1]: Finished man-db-cache-update.service.\n\u2591\u2591 Subject: A start job for unit man-db-cache-update.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit man-db-cache-update.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1815.\nMar 07 11:44:09 managed-node2 systemd[1]: run-p15170-i15171.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 
Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p15170-i15171.service has successfully entered the 'dead' state.\nMar 07 11:44:10 managed-node2 python3.12[15520]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:44:11 managed-node2 python3.12[15676]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nMar 07 11:44:12 managed-node2 kernel: SELinux: Converting 500 SID table entries...\nMar 07 11:44:12 managed-node2 kernel: SELinux: policy capability network_peer_controls=1\nMar 07 11:44:12 managed-node2 kernel: SELinux: policy capability open_perms=1\nMar 07 11:44:12 managed-node2 kernel: SELinux: policy capability extended_socket_class=1\nMar 07 11:44:12 managed-node2 kernel: SELinux: policy capability always_check_network=0\nMar 07 11:44:12 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1\nMar 07 11:44:12 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1\nMar 07 11:44:12 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1\nMar 07 11:44:12 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0\nMar 07 11:44:12 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0\nMar 07 11:44:12 managed-node2 python3.12[15835]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nMar 07 11:44:16 managed-node2 python3.12[15990]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:17 managed-node2 python3.12[16147]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:17 managed-node2 python3.12[16302]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:44:17 managed-node2 python3.12[16457]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nMar 07 11:44:18 managed-node2 python3.12[16582]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901857.642029-10578-203368904599909/.source.yml _original_basename=.e_b4ms88 follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:44:18 managed-node2 python3.12[16737]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman 
annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nMar 07 11:44:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat2437484652-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-compat2437484652-merged.mount has successfully entered the 'dead' state.\nMar 07 11:44:18 managed-node2 kernel: evm: overlay not supported\nMar 07 11:44:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\\x2dcheck1662577311-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-metacopy\\x2dcheck1662577311-merged.mount has successfully entered the 'dead' state.\nMar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.827309846 -0500 EST m=+0.070462949 system refresh\nMar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.828665684 -0500 EST m=+0.071818888 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge)\nMar 07 11:44:18 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine.\n\u2591\u2591 Subject: A start job for unit machine.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1894.\nMar 07 11:44:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice - cgroup machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1893.\nMar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.87970198 -0500 EST m=+0.122855095 container create 246afbb22b17d10477ddd5d5c90f2d7d06c004c92b0b0defa7b3a3a43e4ecbe5 (image=, name=992c9586519a-infra, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09)\nMar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.883851527 -0500 EST m=+0.127004609 pod create 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 (image=, name=nopull)\nMar 07 11:44:19 managed-node2 podman[16744]: 2026-03-07 11:44:19.68863062 -0500 EST m=+0.931783734 container create d4fc0055deaed372cb505f1296fe8e33f059d4ac3adf6ad0c54243b547cbb4c4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, 
pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z)\nMar 07 11:44:19 managed-node2 podman[16744]: 2026-03-07 11:44:19.66621481 -0500 EST m=+0.909368011 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nMar 07 11:44:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:44:22 managed-node2 python3.12[17081]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:44:22 managed-node2 python3.12[17242]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:24 managed-node2 python3.12[17399]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:25 managed-node2 python3.12[17555]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nMar 07 11:44:26 managed-node2 python3.12[17712]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nMar 07 11:44:27 managed-node2 python3.12[17869]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nMar 07 11:44:28 managed-node2 python3.12[18024]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None 
nobest=None releasever=None\nMar 07 11:44:29 managed-node2 python3.12[18180]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:30 managed-node2 python3.12[18336]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:31 managed-node2 python3.12[18492]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nMar 07 11:44:32 managed-node2 python3.12[18676]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nMar 07 11:44:33 managed-node2 python3.12[18831]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nMar 07 11:44:36 managed-node2 python3.12[18986]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:44:38 managed-node2 podman[19151]: 2026-03-07 11:44:38.376455047 -0500 EST m=+0.166868827 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest unable to copy from source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized\nMar 07 11:44:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:44:38 managed-node2 python3.12[19313]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:39 managed-node2 python3.12[19468]: ansible-file Invoked with 
path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:44:39 managed-node2 python3.12[19623]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nMar 07 11:44:39 managed-node2 python3.12[19748]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901879.29425-11520-176634290947885/.source.yml _original_basename=.gt65xr01 follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:44:40 managed-node2 python3.12[19903]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nMar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.341496717 -0500 EST m=+0.013739324 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge)\nMar 07 11:44:40 managed-node2 systemd[1]: Created slice machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice - cgroup machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1899.\nMar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.376773579 -0500 EST m=+0.049016203 container create 1e62cdef56136721315b848a501f951852d27e5af5ee669a7ef1724aa57fbf3a (image=, name=a7c38d962220-infra, pod_id=a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0)\nMar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.381063787 -0500 EST m=+0.053306319 pod create a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 (image=, name=bogus)\nMar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.531228812 -0500 EST m=+0.203471494 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest unable to copy from source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading 
manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized\nMar 07 11:44:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:44:42 managed-node2 python3.12[20227]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:44:43 managed-node2 python3.12[20389]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:45 managed-node2 python3.12[20546]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:46 managed-node2 python3.12[20702]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nMar 07 11:44:47 managed-node2 python3.12[20859]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nMar 07 11:44:48 managed-node2 python3.12[21016]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nMar 07 11:44:49 managed-node2 python3.12[21171]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:50 managed-node2 python3.12[21327]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] 
download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:51 managed-node2 python3.12[21483]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:44:52 managed-node2 python3.12[21639]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nMar 07 11:44:53 managed-node2 python3.12[21823]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nMar 07 11:44:54 managed-node2 python3.12[21978]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nMar 07 11:44:57 managed-node2 python3.12[22133]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:58 managed-node2 python3.12[22290]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:44:58 managed-node2 python3.12[22446]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nMar 07 11:44:59 managed-node2 python3.12[22603]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:44:59 managed-node2 python3.12[22760]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nMar 07 11:44:59 managed-node2 python3.12[22760]: ansible-containers.podman.podman_play version: 5.8.0, kube file /etc/containers/ansible-kubernetes.d/nopull.yml\nMar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.831024987 -0500 EST m=+0.021885022 pod stop 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 (image=, name=nopull)\nMar 07 11:44:59 managed-node2 systemd[1]: 
Removed slice machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice - cgroup machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1905 and the job result is done.\nMar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.862282915 -0500 EST m=+0.053142851 container remove d4fc0055deaed372cb505f1296fe8e33f059d4ac3adf6ad0c54243b547cbb4c4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nMar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.882439363 -0500 EST m=+0.073299300 container remove 246afbb22b17d10477ddd5d5c90f2d7d06c004c92b0b0defa7b3a3a43e4ecbe5 (image=, name=992c9586519a-infra, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09)\nMar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.88990995 -0500 EST m=+0.080769856 pod remove 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 (image=, name=nopull)\nMar 07 11:44:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:45:00 managed-node2 python3.12[22931]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:45:00 managed-node2 python3.12[23086]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:45:00 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:45:03 managed-node2 python3.12[23403]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:45:03 managed-node2 python3.12[23564]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1\nMar 07 11:45:05 managed-node2 python3.12[23721]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:45:06 managed-node2 python3.12[23877]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nMar 07 11:45:07 managed-node2 python3.12[24034]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nMar 07 11:45:08 managed-node2 python3.12[24191]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nMar 07 11:45:09 managed-node2 python3.12[24346]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:45:10 managed-node2 python3.12[24502]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:45:11 managed-node2 python3.12[24658]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None 
disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:45:11 managed-node2 python3.12[24814]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nMar 07 11:45:13 managed-node2 python3.12[24998]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nMar 07 11:45:13 managed-node2 python3.12[25153]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nMar 07 11:45:17 managed-node2 python3.12[25308]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:45:18 managed-node2 python3.12[25465]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:45:19 managed-node2 python3.12[25622]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nMar 07 11:45:19 managed-node2 python3.12[25779]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:45:20 managed-node2 python3.12[25936]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nMar 07 11:45:20 managed-node2 python3.12[25936]: ansible-containers.podman.podman_play version: 5.8.0, kube file /etc/containers/ansible-kubernetes.d/bogus.yml\nMar 07 11:45:20 managed-node2 podman[25943]: 2026-03-07 11:45:20.102000803 -0500 EST m=+0.020362137 pod stop a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 (image=, name=bogus)\nMar 07 11:45:20 managed-node2 systemd[1]: Removed slice machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice - cgroup machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1907 and the job result is done.\nMar 07 11:45:20 managed-node2 podman[25943]: 2026-03-07 11:45:20.136901485 -0500 EST m=+0.055262816 container remove 1e62cdef56136721315b848a501f951852d27e5af5ee669a7ef1724aa57fbf3a (image=, name=a7c38d962220-infra, 
pod_id=a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0)\nMar 07 11:45:20 managed-node2 podman[25943]: 2026-03-07 11:45:20.143954123 -0500 EST m=+0.062315426 pod remove a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 (image=, name=bogus)\nMar 07 11:45:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:45:20 managed-node2 python3.12[26107]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:45:20 managed-node2 python3.12[26262]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:45:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:45:23 managed-node2 python3.12[26580]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:45:24 managed-node2 python3.12[26741]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:45:27 managed-node2 python3.12[26898]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:45:28 managed-node2 python3.12[27054]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nMar 07 11:45:28 managed-node2 python3.12[27211]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nMar 07 11:45:29 managed-node2 python3.12[27368]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] 
source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nMar 07 11:45:30 managed-node2 python3.12[27523]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:45:31 managed-node2 python3.12[27679]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:45:32 managed-node2 python3.12[27835]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nMar 07 11:45:33 managed-node2 python3.12[27991]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nMar 07 11:45:34 managed-node2 python3.12[28175]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nMar 07 11:45:35 managed-node2 python3.12[28330]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nMar 07 11:45:39 managed-node2 python3.12[28486]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nMar 07 11:45:39 managed-node2 python3.12[28642]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:45:39 managed-node2 python3.12[28799]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:45:40 managed-node2 python3.12[28955]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user 
_uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:45:41 managed-node2 python3.12[29111]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:45:41 managed-node2 python3.12[29267]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nMar 07 11:45:41 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001.\n\u2591\u2591 Subject: A start job for unit user-3001.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user-3001.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1987.\nMar 07 11:45:41 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001...\n\u2591\u2591 Subject: A start job for unit user-runtime-dir@3001.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user-runtime-dir@3001.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1909.\nMar 07 11:45:41 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001.\n\u2591\u2591 Subject: A start job for unit user-runtime-dir@3001.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user-runtime-dir@3001.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1909.\nMar 07 11:45:41 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001...\n\u2591\u2591 Subject: A start job for unit user@3001.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user@3001.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1989.\nMar 07 11:45:41 managed-node2 systemd-logind[759]: New session 9 of user podman_basic_user.\n\u2591\u2591 Subject: A new session 9 has been created for user podman_basic_user\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: sd-login(3)\n\u2591\u2591 \n\u2591\u2591 A new session with the ID 9 has been created for the user podman_basic_user.\n\u2591\u2591 \n\u2591\u2591 The leading process of the session is 29271.\nMar 07 11:45:41 managed-node2 (systemd)[29271]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0)\nMar 07 11:45:41 managed-node2 systemd[29271]: Queued start job for default target default.target.\nMar 07 11:45:41 managed-node2 systemd[29271]: Created slice app.slice - User Application Slice.\n\u2591\u2591 
Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 7.\nMar 07 11:45:41 managed-node2 systemd[29271]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 11.\nMar 07 11:45:41 managed-node2 systemd[29271]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 10.\nMar 07 11:45:41 managed-node2 systemd[29271]: Reached target paths.target - Paths.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3.\nMar 07 11:45:41 managed-node2 systemd[29271]: Reached target timers.target - Timers.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 9.\nMar 07 11:45:41 managed-node2 systemd[29271]: Starting dbus.socket - D-Bus User Message Bus Socket...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 6.\nMar 07 11:45:41 managed-node2 systemd[29271]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 12.\nMar 07 11:45:41 managed-node2 systemd[29271]: Listening on dbus.socket - D-Bus User Message Bus Socket.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 6.\nMar 07 11:45:41 managed-node2 systemd[29271]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 
\n\u2591\u2591 The job identifier is 12.\nMar 07 11:45:41 managed-node2 systemd[29271]: Reached target sockets.target - Sockets.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5.\nMar 07 11:45:41 managed-node2 systemd[29271]: Reached target basic.target - Basic System.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2.\nMar 07 11:45:41 managed-node2 systemd[29271]: Reached target default.target - Main User Target.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1.\nMar 07 11:45:41 managed-node2 systemd[29271]: Startup finished in 67ms.\n\u2591\u2591 Subject: User manager start-up is now complete\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The user manager instance for user 3001 has been started. All services queued\n\u2591\u2591 for starting have been started. Note that other services might still be starting\n\u2591\u2591 up or be started at any later time.\n\u2591\u2591 \n\u2591\u2591 Startup of the manager took 67799 microseconds.\nMar 07 11:45:41 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001.\n\u2591\u2591 Subject: A start job for unit user@3001.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user@3001.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1989.\nMar 07 11:45:42 managed-node2 python3.12[29442]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:45:42 managed-node2 python3.12[29597]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:45:43 managed-node2 sudo[29802]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhibmgzsvzhrmfekymwxsnjugtcfcwcf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901942.8189244-14095-168859344459604/AnsiballZ_podman_image.py'\nMar 07 11:45:43 managed-node2 sudo[29802]: pam_unix(sudo:session): session opened for user 
podman_basic_user(uid=3001) by root(uid=0)\nMar 07 11:45:43 managed-node2 kernel: catatonit[29817]: segfault at a9b80 ip 00007fdf17f46dbb sp 00007fffc0c93fa0 error 4 in catatonit[4dbb,7fdf17f43000+77000] likely on CPU 0 (core 0, socket 0)\nMar 07 11:45:43 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:43 managed-node2 systemd-coredump[29824]: Process 29817 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:43 managed-node2 systemd[1]: Created slice system-systemd\\x2dcoredump.slice - Slice /system/systemd-coredump.\n\u2591\u2591 Subject: A start job for unit system-systemd\\x2dcoredump.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit system-systemd\\x2dcoredump.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2075.\nMar 07 11:45:43 managed-node2 systemd[1]: Started systemd-coredump@0-29824-0.service - Process Core Dump (PID 29824/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@0-29824-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@0-29824-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2070.\nMar 07 11:45:43 managed-node2 systemd[29271]: Created slice session.slice - User Core Session Slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 14.\nMar 07 11:45:43 managed-node2 systemd[29271]: Starting dbus-broker.service - D-Bus User Message Bus...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 13.\nMar 07 11:45:43 managed-node2 systemd[29271]: Started dbus-broker.service - D-Bus User Message Bus.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 13.\nMar 07 11:45:43 managed-node2 dbus-broker-launch[29829]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored\nMar 07 11:45:43 managed-node2 dbus-broker-launch[29829]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored\nMar 07 11:45:43 managed-node2 systemd-coredump[29826]: Resource limits disable core dumping for process 29817 (catatonit).\nMar 07 11:45:43 managed-node2 systemd-coredump[29826]: Process 29817 (catatonit) of user 3001 terminated abnormally without generating a coredump.\n\u2591\u2591 Subject: Process 29817 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 29817 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:43 managed-node2 systemd[1]: systemd-coredump@0-29824-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@0-29824-0.service has successfully entered the 'dead' state.\nMar 07 11:45:43 managed-node2 dbus-broker-launch[29829]: Ready\nMar 07 11:45:43 managed-node2 systemd[29271]: Created slice user.slice - Slice /user.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 20.\nMar 07 11:45:43 managed-node2 systemd[29271]: Started podman-29812.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 19.\nMar 07 11:45:43 managed-node2 kernel: catatonit[29846]: segfault at a9b80 ip 00007ff7160afdbb sp 00007fff8cfc2e40 error 4 in catatonit[4dbb,7ff7160ac000+77000] likely on CPU 0 (core 0, socket 0)\nMar 07 11:45:43 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:43 managed-node2 systemd-coredump[29853]: Process 29846 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:43 managed-node2 systemd[1]: Started systemd-coredump@1-29853-0.service - Process Core Dump (PID 29853/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@1-29853-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@1-29853-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2079.\nMar 07 11:45:43 managed-node2 systemd[29271]: Started podman-29841.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 23.\nMar 07 11:45:43 managed-node2 systemd-coredump[29856]: Resource limits disable core dumping for process 29846 (catatonit).\nMar 07 11:45:43 managed-node2 systemd-coredump[29856]: Process 29846 (catatonit) of user 3001 terminated abnormally without generating a coredump.\n\u2591\u2591 Subject: Process 29846 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 29846 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a 
programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:43 managed-node2 systemd[1]: systemd-coredump@1-29853-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@1-29853-0.service has successfully entered the 'dead' state.\nMar 07 11:45:44 managed-node2 kernel: catatonit[29891]: segfault at a9b80 ip 00007fc382ae6dbb sp 00007fff833112d0 error 4 in catatonit[4dbb,7fc382ae3000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:45:44 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:44 managed-node2 systemd-coredump[29898]: Process 29891 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:44 managed-node2 systemd[1]: Started systemd-coredump@2-29898-0.service - Process Core Dump (PID 29898/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@2-29898-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@2-29898-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2088.\nMar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29886.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 27.\nMar 07 11:45:44 managed-node2 systemd[29271]: Started podman-pause-320fc37c.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 31.\nMar 07 11:45:44 managed-node2 systemd-coredump[29900]: Resource limits disable core dumping for process 29891 (catatonit).\nMar 07 11:45:44 managed-node2 systemd-coredump[29900]: Process 29891 (catatonit) of user 3001 terminated abnormally without generating a coredump.\n\u2591\u2591 Subject: Process 29891 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 29891 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:44 managed-node2 systemd[1]: systemd-coredump@2-29898-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@2-29898-0.service has successfully entered the 'dead' state.\nMar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29905.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 35.\nMar 07 11:45:44 managed-node2 kernel: catatonit[29927]: segfault at a9b80 ip 00007f18b4071dbb sp 00007fffc0fc07f0 error 4 in catatonit[4dbb,7f18b406e000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:45:44 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:44 managed-node2 systemd-coredump[29935]: Process 29927 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:44 managed-node2 systemd[1]: Started systemd-coredump@3-29935-0.service - Process Core Dump (PID 29935/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@3-29935-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@3-29935-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2097.\nMar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29922.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 39.\nMar 07 11:45:44 managed-node2 systemd-coredump[29936]: Resource limits disable core dumping for process 29927 (catatonit).\nMar 07 11:45:44 managed-node2 systemd-coredump[29936]: Process 29927 (catatonit) of user 3001 terminated abnormally without generating a coredump.\n\u2591\u2591 Subject: Process 29927 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 29927 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:44 managed-node2 systemd[29271]: podman-pause-bf3bda8f.scope: Couldn't move process 29927 to requested cgroup '/user.slice/user-3001.slice/user@3001.service/user.slice/podman-pause-bf3bda8f.scope' (directly or via the system bus): No such process\nMar 07 11:45:44 managed-node2 systemd[29271]: podman-pause-bf3bda8f.scope: Failed to add PIDs to scope's control group: Permission denied\nMar 07 11:45:44 managed-node2 systemd[29271]: podman-pause-bf3bda8f.scope: Failed with result 'resources'.\n\u2591\u2591 Subject: Unit failed\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT has entered the 'failed' state with result 'resources'.\nMar 07 11:45:44 managed-node2 systemd[1]: systemd-coredump@3-29935-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@3-29935-0.service has successfully entered the 'dead' state.\nMar 07 11:45:44 managed-node2 systemd[29271]: Failed to start podman-pause-bf3bda8f.scope.\n\u2591\u2591 Subject: A start job for unit UNIT 
has failed\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished with a failure.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 43 and the job result is failed.\nMar 07 11:45:44 managed-node2 kernel: catatonit[29953]: segfault at a9b80 ip 00007faeb6efadbb sp 00007ffc7421de50 error 4 in catatonit[4dbb,7faeb6ef7000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:45:44 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:44 managed-node2 systemd-coredump[29961]: Process 29953 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:44 managed-node2 systemd[1]: Started systemd-coredump@4-29961-0.service - Process Core Dump (PID 29961/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@4-29961-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@4-29961-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2106.\nMar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29948.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 47.\nMar 07 11:45:44 managed-node2 systemd-coredump[29963]: Resource limits disable core dumping for process 29953 (catatonit).\nMar 07 11:45:44 managed-node2 systemd-coredump[29963]: Process 29953 (catatonit) of user 3001 terminated abnormally without generating a coredump.\n\u2591\u2591 Subject: Process 29953 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 29953 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:44 managed-node2 systemd[29271]: Started podman-pause-ed80bfc9.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 51.\nMar 07 11:45:44 managed-node2 systemd[1]: systemd-coredump@4-29961-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@4-29961-0.service has successfully entered the 'dead' state.\nMar 07 11:45:44 managed-node2 sudo[29802]: pam_unix(sudo:session): session closed for user podman_basic_user\nMar 07 11:45:44 managed-node2 python3.12[30123]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:45:45 managed-node2 
python3.12[30278]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:45:45 managed-node2 python3.12[30433]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nMar 07 11:45:46 managed-node2 python3.12[30558]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901945.4660614-14184-34473031875573/.source.yml _original_basename=.8iqqb013 follow=False checksum=5a374c59230176d446e6cd38bcc64da326c45092 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:45:46 managed-node2 sudo[30763]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oerbzxzssklczavswqvqximbbvrtsmtu ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901946.1921344-14217-62861233194905/AnsiballZ_podman_play.py'\nMar 07 11:45:46 managed-node2 sudo[30763]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nMar 07 11:45:46 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nMar 07 11:45:46 managed-node2 kernel: catatonit[30786]: segfault at a9b80 ip 00007f77afc58dbb sp 00007fff81ee37a0 error 4 in catatonit[4dbb,7f77afc55000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:45:46 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:46 managed-node2 systemd-coredump[30794]: Process 30786 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:46 managed-node2 systemd[1]: Started systemd-coredump@5-30794-0.service - Process Core Dump (PID 30794/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@5-30794-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@5-30794-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2115.\nMar 07 11:45:46 managed-node2 systemd[29271]: Started podman-30780.scope.\n\u2591\u2591 
Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 55.\nMar 07 11:45:46 managed-node2 systemd[29271]: Created slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice - cgroup user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 59.\nMar 07 11:45:46 managed-node2 systemd-coredump[30796]: Resource limits disable core dumping for process 30786 (catatonit).\nMar 07 11:45:46 managed-node2 systemd-coredump[30796]: Process 30786 (catatonit) of user 3001 terminated abnormally without generating a coredump.\n\u2591\u2591 Subject: Process 30786 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 30786 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:46 managed-node2 systemd[1]: systemd-coredump@5-30794-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@5-30794-0.service has successfully entered the 'dead' state.\nMar 07 11:45:46 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6\nMar 07 11:45:46 managed-node2 systemd[29271]: Started rootless-netns-8fa057a2.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 63.\nMar 07 11:45:46 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. 
Update your scripts to load br_netfilter if you need this.\nMar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:45:46 managed-node2 kernel: veth0: entered allmulticast mode\nMar 07 11:45:46 managed-node2 kernel: veth0: entered promiscuous mode\nMar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nMar 07 11:45:46 managed-node2 systemd[29271]: Started run-p30834-i30835.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 67.\nMar 07 11:45:46 managed-node2 aardvark-dns[30834]: starting aardvark on a child with pid 30835\nMar 07 11:45:46 managed-node2 aardvark-dns[30835]: Successfully parsed config\nMar 07 11:45:46 managed-node2 aardvark-dns[30835]: Listen v4 ip {\"podman-default-kube-network\": [10.89.0.1]}\nMar 07 11:45:46 managed-node2 aardvark-dns[30835]: Listen v6 ip {}\nMar 07 11:45:46 managed-node2 aardvark-dns[30835]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53]\nMar 07 11:45:46 managed-node2 conmon[30852]: conmon a532637f985bd7708dd5 : failed to write to /proc/self/oom_score_adj: Permission denied\nMar 07 11:45:46 managed-node2 systemd[29271]: Started libpod-conmon-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 71.\nMar 07 11:45:46 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/15/attach}\nMar 07 11:45:46 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : terminal_ctrl_fd: 15\nMar 07 11:45:46 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : winsz read side: 18, winsz write side: 19\nMar 07 11:45:47 managed-node2 systemd[29271]: Started libpod-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 76.\nMar 07 11:45:47 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : container PID: 30855\nMar 07 11:45:47 managed-node2 kernel: catatonit[30855]: segfault at a9b80 ip 00007f9031569dbb sp 00007ffe6192e700 error 4 in catatonit[4dbb,7f9031566000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:45:47 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:47 managed-node2 systemd-coredump[30857]: Process 30855 (catatonit) of user 
3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:47 managed-node2 systemd[1]: Started systemd-coredump@6-30857-0.service - Process Core Dump (PID 30857/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@6-30857-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@6-30857-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2124.\nMar 07 11:45:47 managed-node2 conmon[30859]: conmon ee4a1b77972d6a790be3 : failed to write to /proc/self/oom_score_adj: Permission denied\nMar 07 11:45:47 managed-node2 systemd[29271]: Started libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 81.\nMar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach}\nMar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : terminal_ctrl_fd: 14\nMar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : winsz read side: 17, winsz write side: 18\nMar 07 11:45:47 managed-node2 systemd[29271]: Started libpod-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 86.\nMar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : container PID: 30863\nMar 07 11:45:47 managed-node2 systemd-coredump[30858]: Resource limits disable core dumping for process 30855 (catatonit).\nMar 07 11:45:47 managed-node2 systemd-coredump[30858]: Process 30855 (catatonit) of user 3001 terminated abnormally without generating a coredump.\n\u2591\u2591 Subject: Process 30855 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 30855 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:47 managed-node2 systemd[1]: systemd-coredump@6-30857-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@6-30857-0.service has successfully entered the 'dead' state.\nMar 07 11:45:47 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : container 30855 exited with status 139\nMar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nMar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE 
stdout: Pod:\n 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399\n Container:\n ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\n \nMar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"systemd-logind: Unknown object '/'.\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using transient store: false\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Not configuring container store\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using OCI 
runtime \\\"/usr/bin/crun\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Creating a new rootless user namespace\"\n time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"systemd-logind: Unknown object '/'.\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using transient store: false\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: 
invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Successfully loaded 1 networks\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"found free device name podman1\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"found free ipv4 network subnet 10.89.0.0/24\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Created cgroup path user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"setting container name 46ab0de2b796-infra\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 196e978a743fccc03fb8ddd2d41a1f9a15d160f55231f9844a9070e6a9ce61ba bridge podman1 2026-03-07 11:45:46.711313386 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Allocated lock 1 for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are not supported\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Check for idmapped mounts support \"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Created container \\\"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Container \\\"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Container \\\"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata\\\"\"\n 
time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Trying 
\\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"adding container to pod httpd1\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"setting container name httpd1-httpd1\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Allocated lock 2 for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Created container \\\"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Container \\\"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata\\\"\"\n 
time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Container \\\"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Strongconnecting node a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Pushed a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 onto stack\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Finishing node a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431. Popped a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 off stack\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Strongconnecting node ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Pushed ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 onto stack\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Finishing node ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431. Popped ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 off stack\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Made network namespace at /run/user/3001/netns/netns-dcd66955-fe96-f197-416b-aad9b87d86cb for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Created root filesystem for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/rootfs/merge\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Creating rootless network namespace at \\\"/run/user/3001/containers/networks/rootless-netns/rootless-netns\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"The path of /etc/resolv.conf in the mount ns is \\\"/etc/resolv.conf\\\"\"\n [DEBUG netavark::network::validation] Validating network namespace...\n [DEBUG netavark::commands::setup] Setting up...\n [INFO netavark::firewall] Using nftables firewall driver\n [DEBUG netavark::network::bridge] Setup network podman-default-kube-network\n [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24]\n [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24]\n [DEBUG netavark::network::bridge] Using mtu 65520 from default route interface for the network\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/ip_forward to 1\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/route_localnet to 1\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/rp_filter to 2\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv6/conf/eth0/autoconf to 0\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/arp_notify to 1\n [DEBUG netavark::network::sysctl] Setting sysctl 
value for net/ipv4/conf/eth0/rp_filter to 2\n [INFO netavark::network::netlink_route] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100)\n [INFO netavark::firewall::nft] Creating container chain nv_196e978a_10_89_0_0_nm24\n [DEBUG netavark::dns::aardvark] Spawning aardvark server\n [DEBUG netavark::dns::aardvark] start aardvark-dns: [\"systemd-run\", \"-q\", \"--scope\", \"--user\", \"/usr/libexec/podman/aardvark-dns\", \"--config\", \"/run/user/3001/containers/networks/aardvark-dns\", \"-p\", \"53\", \"run\"]\n [DEBUG netavark::commands::setup] {\n \"podman-default-kube-network\": StatusBlock {\n dns_search_domains: Some(\n [\n \"dns.podman\",\n ],\n ),\n dns_server_ips: Some(\n [\n 10.89.0.1,\n ],\n ),\n interfaces: Some(\n {\n \"eth0\": NetInterface {\n mac_address: \"d6:83:e5:9a:8f:77\",\n subnets: Some(\n [\n NetAddress {\n gateway: Some(\n 10.89.0.1,\n ),\n ipnet: 10.89.0.2/24,\n },\n ],\n ),\n },\n },\n ),\n },\n }\n [DEBUG netavark::commands::setup] Setup complete\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"rootlessport: time=\\\"2026-03-07T11:45:46-05:00\\\" level=info msg=\\\"Starting parent driver\\\"\\n\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"rootlessport: time=\\\"2026-03-07T11:45:46-05:00\\\" level=info msg=\\\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport112898145/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport112898145/.bp.sock]\\\"\\n\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"rootlessport: time=\\\"2026-03-07T11:45:46-05:00\\\" level=info msg=\\\"Starting child driver in child netns (\\\\\\\"/proc/self/exe\\\\\\\" [rootlessport-child])\\\"\\n\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"rootlessport: time=\\\"2026-03-07T11:45:46-05:00\\\" level=info msg=\\\"Waiting for initComplete\\\"\\n\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"rootlessport: time=\\\"2026-03-07T11:45:46-05:00\\\" level=info msg=\\\"initComplete is closed; parent and child established the communication channel\\\"\\n\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"rootlessport: time=\\\"2026-03-07T11:45:46-05:00\\\" level=info msg=\\\"Exposing ports [{ 80 15001 1 tcp}]\\\"\\n\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"rootlessport: time=\\\"2026-03-07T11:45:46-05:00\\\" level=info msg=Ready\\n\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"rootlessport is ready\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Setting Cgroups for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 to user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice:libpod:a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/rootfs/merge\\\"\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Created OCI spec for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 at 
/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/config.json\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Created cgroup path user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2026-03-07T11:45:46-05:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 -u a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata -p /run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/pidfile -n 46ab0de2b796-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431]\"\n time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Running conmon under slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice and unitName libpod-conmon-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope\"\n [conmon:d]: failed to write to 
/proc/self/oom_score_adj: Permission denied\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Received: 30855\"\n time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"Got Conmon PID as 30853\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Created container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 in OCI runtime\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Starting container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 with command [/catatonit -P]\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Started container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/JH6ZGMTVFTMNRQA4DOPAJEHHJA,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/work,userxattr,context=\\\"system_u:object_r:container_file_t:s0:c60,c376\\\"\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Mounted container \\\"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\\\" at \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/merged\\\"\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Created root filesystem for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 at /home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/merged\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Setting Cgroups for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 to user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice:libpod:ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Created OCI spec for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/config.json\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Created cgroup path user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Got pod 
cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 -u ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata -p /run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431]\"\n time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"Running conmon under slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice and unitName libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope\"\n [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Received: 30863\"\n time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"Got Conmon PID as 30861\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Created container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 in OCI runtime\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Starting container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Started container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\"\n 
time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=30780\n time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Shutting down engines\"\nMar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431)\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"Setting parallel job count to 7\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"Using sqlite as database backend\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"systemd-logind: Unknown object '/'.\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Using graph driver overlay\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Using transient store: false\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Cached value 
indicated that overlay is supported\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Initializing event backend file\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nMar 07 11:45:47 managed-node2 sudo[30763]: pam_unix(sudo:session): session closed for user podman_basic_user\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Cleaning up container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Tearing down network namespace at /run/user/3001/netns/netns-dcd66955-fe96-f197-416b-aad9b87d86cb for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"\nMar 07 11:45:47 managed-node2 
/usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 196e978a743fccc03fb8ddd2d41a1f9a15d160f55231f9844a9070e6a9ce61ba bridge podman1 2026-03-07 11:45:46.711313386 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Successfully loaded 2 networks\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"The path of /etc/resolv.conf in the mount ns is \\\"/etc/resolv.conf\\\"\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"netavark: [DEBUG netavark::commands::teardown] Tearing down..\\n\"\nMar 07 11:45:47 managed-node2 aardvark-dns[30835]: Received SIGHUP\nMar 07 11:45:47 managed-node2 aardvark-dns[30835]: Successfully parsed config\nMar 07 11:45:47 managed-node2 aardvark-dns[30835]: Listen v4 ip {}\nMar 07 11:45:47 managed-node2 aardvark-dns[30835]: Listen v6 ip {}\nMar 07 11:45:47 managed-node2 aardvark-dns[30835]: No configuration found stopping the sever\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"netavark: [INFO netavark::firewall] Using nftables firewall driver\\n\"\nMar 07 11:45:47 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:45:47 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nMar 07 11:45:47 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nMar 07 11:45:47 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"netavark: [INFO netavark::network::bridge] removing bridge podman1\\n\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"INPUT\\\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"ip\\\", field: \\\"saddr\\\" }))), right: Named(Prefix(Prefix { addr: String(\\\"10.89.0.0\\\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\\\"tcp\\\")), Element(String(\\\"udp\\\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"th\\\", field: \\\"dport\\\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"FORWARD\\\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"ip\\\", field: \\\"daddr\\\" }))), right: Named(Prefix(Prefix { addr: String(\\\"10.89.0.0\\\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \\\"state\\\", family: None, dir: None })), right: List([String(\\\"established\\\"), String(\\\"related\\\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"FORWARD\\\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"ip\\\", field: \\\"saddr\\\" }))), right: Named(Prefix(Prefix { addr: 
String(\\\"10.89.0.0\\\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"POSTROUTING\\\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"ip\\\", field: \\\"saddr\\\" }))), right: Named(Prefix(Prefix { addr: String(\\\"10.89.0.0\\\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \\\"nv_196e978a_10_89_0_0_nm24\\\" })], handle: Some(26), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Removing 4 rules\\n[DEBUG netavark::firewall::nft] Found chain nv_196e978a_10_89_0_0_nm24\\n\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"NETAVARK-ISOLATION-3\\\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\\\"podman1\\\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\\n\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"netavark: [DEBUG netavark::firewall::nft] Found chain nv_196e978a_10_89_0_0_nm24_dnat\\n\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"netavark: [DEBUG netavark::firewall::nft] Found chain nv_196e978a_10_89_0_0_nm24_dnat\\n\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"netavark: [DEBUG netavark::commands::teardown] Teardown complete\\n\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Cleaning up rootless network namespace\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Successfully cleaned up container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431)\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=debug msg=\"Shutting down engines\"\nMar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time=\"2026-03-07T11:45:47-05:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=30868\nMar 07 11:45:47 managed-node2 sudo[31091]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dogisxyjglvsrovdvtkjavoxxllbgpoh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901947.3123984-14267-185518984212991/AnsiballZ_systemd.py'\nMar 07 11:45:47 managed-node2 sudo[31091]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nMar 07 
11:45:47 managed-node2 python3.12[31094]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nMar 07 11:45:47 managed-node2 systemd[29271]: Reload requested from client PID 31095 ('systemctl')...\nMar 07 11:45:47 managed-node2 systemd[29271]: Reloading...\nMar 07 11:45:47 managed-node2 systemd[29271]: Reloading finished in 42 ms.\nMar 07 11:45:47 managed-node2 sudo[31091]: pam_unix(sudo:session): session closed for user podman_basic_user\nMar 07 11:45:48 managed-node2 sudo[31309]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rictksfalktnaehelhfnnbscqeskopdj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901947.9564757-14297-244281299619627/AnsiballZ_systemd.py'\nMar 07 11:45:48 managed-node2 sudo[31309]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nMar 07 11:45:48 managed-node2 python3.12[31312]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None\nMar 07 11:45:48 managed-node2 systemd[29271]: Reload requested from client PID 31315 ('systemctl')...\nMar 07 11:45:48 managed-node2 systemd[29271]: Reloading...\nMar 07 11:45:48 managed-node2 systemd[29271]: Reloading finished in 39 ms.\nMar 07 11:45:48 managed-node2 sudo[31309]: pam_unix(sudo:session): session closed for user podman_basic_user\nMar 07 11:45:48 managed-node2 sudo[31529]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nuvmtvywssfvscqhqidkfbcugrldioki ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901948.6670218-14329-160015858227475/AnsiballZ_systemd.py'\nMar 07 11:45:48 managed-node2 sudo[31529]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nMar 07 11:45:49 managed-node2 python3.12[31532]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nMar 07 11:45:49 managed-node2 systemd[29271]: Created slice app-podman\\x2dkube.slice - Slice /app/podman-kube.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 103.\nMar 07 11:45:49 managed-node2 systemd[29271]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 91.\nMar 07 11:45:49 managed-node2 kernel: catatonit[31544]: segfault at a9b80 ip 00007f1befc8ddbb sp 00007ffc46cfc240 error 4 in catatonit[4dbb,7f1befc8a000+77000] likely on CPU 0 (core 0, socket 0)\nMar 07 11:45:49 managed-node2 kernel: Code: 66 66 66 2e 
0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:49 managed-node2 systemd-coredump[31550]: Process 31544 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:49 managed-node2 systemd[1]: Started systemd-coredump@7-31550-0.service - Process Core Dump (PID 31550/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@7-31550-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@7-31550-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2135.\nMar 07 11:45:49 managed-node2 systemd-coredump[31553]: Process 31544 (catatonit) of user 3001 dumped core.\n \n Module /usr/libexec/catatonit/catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 31544:\n #0 0x00007f1befc8ddbb __libc_setup_tls (/usr/libexec/catatonit/catatonit + 0x4dbb)\n #1 0x00007f1befc8da79 __libc_start_main_impl (/usr/libexec/catatonit/catatonit + 0x4a79)\n #2 0x00007f1befc8b4e5 _start (/usr/libexec/catatonit/catatonit + 0x24e5)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 31544 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 31544 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:49 managed-node2 systemd[1]: systemd-coredump@7-31550-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@7-31550-0.service has successfully entered the 'dead' state.\nMar 07 11:45:59 managed-node2 podman[31542]: time=\"2026-03-07T11:45:59-05:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL\"\nMar 07 11:45:59 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : container 30863 exited with status 137\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431)\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=info msg=\"Setting parallel job count to 7\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" 
level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=info msg=\"Using sqlite as database backend\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"systemd-logind: Unknown object '/'.\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Using graph driver overlay\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Using transient store: false\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Initializing event backend file\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nMar 07 
11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431)\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=debug msg=\"Shutting down engines\"\nMar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time=\"2026-03-07T11:45:59-05:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=31562\nMar 07 11:45:59 managed-node2 systemd[29271]: Stopping libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope...\n\u2591\u2591 Subject: A stop job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 105.\nMar 07 11:45:59 managed-node2 systemd[29271]: Stopped libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 105 and the job result is done.\nMar 07 11:45:59 managed-node2 systemd[29271]: Removed slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice - cgroup user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 104 and the job result is done.\nMar 07 11:45:59 managed-node2 podman[31542]: Pods stopped:\nMar 07 11:45:59 managed-node2 podman[31542]: 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399\nMar 07 11:45:59 managed-node2 podman[31542]: Pods removed:\nMar 07 11:45:59 managed-node2 podman[31542]: 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399\nMar 07 11:45:59 managed-node2 podman[31542]: Secrets removed:\nMar 07 11:45:59 managed-node2 podman[31542]: Volumes removed:\nMar 07 11:45:59 managed-node2 systemd[29271]: Created slice user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice - cgroup user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 107.\nMar 07 11:45:59 managed-node2 systemd[29271]: Started libpod-879669150b8fd150356b3d47d7f340be20e76730c97501db0eb82939c5fc9bd1.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 111.\nMar 07 11:45:59 managed-node2 kernel: catatonit[31575]: segfault at a9b80 ip 00007f139f341dbb sp 00007fff16d48850 error 4 in catatonit[4dbb,7f139f33e000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:45:59 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:59 managed-node2 systemd-coredump[31577]: Process 31575 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:59 managed-node2 systemd[1]: Started systemd-coredump@8-31577-0.service - Process Core Dump (PID 31577/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@8-31577-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@8-31577-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2144.\nMar 07 11:45:59 managed-node2 systemd[29271]: Started rootless-netns-461281de.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 115.\nMar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:45:59 managed-node2 kernel: veth0: entered allmulticast mode\nMar 07 11:45:59 managed-node2 kernel: veth0: entered promiscuous mode\nMar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:45:59 
managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nMar 07 11:45:59 managed-node2 systemd-coredump[31579]: Process 31575 (catatonit) of user 3001 dumped core.\n \n Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 1:\n #0 0x00007f139f341dbb n/a (/catatonit + 0x4dbb)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 31575 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 31575 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:59 managed-node2 systemd[1]: systemd-coredump@8-31577-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@8-31577-0.service has successfully entered the 'dead' state.\nMar 07 11:45:59 managed-node2 conmon[31573]: conmon 879669150b8fd150356b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-3001.slice/user@3001.service/user.slice/libpod-879669150b8fd150356b3d47d7f340be20e76730c97501db0eb82939c5fc9bd1.scope/container/memory.events\nMar 07 11:45:59 managed-node2 systemd[29271]: Started run-p31598-i31599.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 119.\nMar 07 11:45:59 managed-node2 systemd[29271]: Started libpod-fa374669b33b370f44864c8cec6d40cc032a256cc26f6dca99aab760c7fd4b53.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 123.\nMar 07 11:45:59 managed-node2 kernel: catatonit[31627]: segfault at a9b80 ip 00007fb82f5dbdbb sp 00007fff96168e50 error 4 in catatonit[4dbb,7fb82f5d8000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:45:59 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:59 managed-node2 systemd-coredump[31632]: Process 31627 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:59 managed-node2 systemd[1]: Started systemd-coredump@9-31632-0.service - Process Core Dump (PID 31632/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@9-31632-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@9-31632-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2153.\nMar 07 11:45:59 managed-node2 systemd[29271]: Started 
libpod-309bf1ae864fe28aa1049bfc71c43fecc7211dc1b56ee779eeec10eb7def34c6.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 128.\nMar 07 11:45:59 managed-node2 podman[31542]: Pod:\nMar 07 11:45:59 managed-node2 systemd[29271]: podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range\nMar 07 11:45:59 managed-node2 systemd[29271]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 91.\nMar 07 11:45:59 managed-node2 podman[31542]: 02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a\nMar 07 11:45:59 managed-node2 podman[31542]: Container:\nMar 07 11:45:59 managed-node2 podman[31542]: 309bf1ae864fe28aa1049bfc71c43fecc7211dc1b56ee779eeec10eb7def34c6\nMar 07 11:45:59 managed-node2 systemd-coredump[31633]: Process 31627 (catatonit) of user 3001 dumped core.\n \n Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 1:\n #0 0x00007fb82f5dbdbb n/a (/catatonit + 0x4dbb)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 31627 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 31627 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:45:59 managed-node2 systemd[1]: systemd-coredump@9-31632-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@9-31632-0.service has successfully entered the 'dead' state.\nMar 07 11:45:59 managed-node2 sudo[31529]: pam_unix(sudo:session): session closed for user podman_basic_user\nMar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:45:59 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nMar 07 11:45:59 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nMar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:45:59 managed-node2 kernel: catatonit[31700]: segfault at a9b80 ip 00007fea7c676dbb sp 00007ffef7d06ba0 error 4 in catatonit[4dbb,7fea7c673000+77000] likely on CPU 0 (core 0, socket 0)\nMar 07 11:45:59 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:45:59 managed-node2 systemd-coredump[31705]: Process 31700 (catatonit) of user 3001 
terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:45:59 managed-node2 systemd[1]: Started systemd-coredump@10-31705-0.service - Process Core Dump (PID 31705/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@10-31705-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@10-31705-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2162.\nMar 07 11:46:00 managed-node2 systemd-coredump[31708]: Process 31700 (catatonit) of user 3001 dumped core.\n \n Module /usr/libexec/catatonit/catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 31700:\n #0 0x00007fea7c676dbb __libc_setup_tls (/usr/libexec/catatonit/catatonit + 0x4dbb)\n #1 0x00007fea7c676a79 __libc_start_main_impl (/usr/libexec/catatonit/catatonit + 0x4a79)\n #2 0x00007fea7c6744e5 _start (/usr/libexec/catatonit/catatonit + 0x24e5)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 31700 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 31700 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:46:00 managed-node2 systemd[1]: systemd-coredump@10-31705-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@10-31705-0.service has successfully entered the 'dead' state.\nMar 07 11:46:00 managed-node2 python3.12[31846]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nMar 07 11:46:00 managed-node2 python3.12[32002]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:46:01 managed-node2 python3.12[32159]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:46:02 managed-node2 python3.12[32315]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:46:02 managed-node2 python3.12[32470]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:46:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated 
successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:46:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:46:03 managed-node2 podman[32648]: 2026-03-07 11:46:03.898635121 -0500 EST m=+0.392627575 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nMar 07 11:46:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:46:04 managed-node2 python3.12[32839]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:46:04 managed-node2 python3.12[32994]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:46:05 managed-node2 python3.12[33149]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nMar 07 11:46:05 managed-node2 python3.12[33274]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901964.9849694-14845-101719091169581/.source.yml _original_basename=.e466nazg follow=False checksum=3ff675c4424d0c6a65148416b04367244e5cae81 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:46:05 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.010232203 -0500 EST m=+0.016945230 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge)\nMar 07 11:46:06 managed-node2 systemd[1]: Created slice 
machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice - cgroup machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2171.\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.051819164 -0500 EST m=+0.058532100 container create ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58)\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.055915225 -0500 EST m=+0.062628141 pod create 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2)\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.081531085 -0500 EST m=+0.088244108 container create 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1039] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.05840266 -0500 EST m=+0.065115702 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nMar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:06 managed-node2 kernel: veth0: entered allmulticast mode\nMar 07 11:46:06 managed-node2 kernel: veth0: entered promiscuous mode\nMar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nMar 07 11:46:06 managed-node2 (udev-worker)[33449]: Network interface NamePolicy= disabled on kernel command line.\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1191] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4)\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1201] device (veth0): carrier: link connected\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1203] device (podman1): carrier: link connected\nMar 07 11:46:06 managed-node2 (udev-worker)[33448]: Network interface NamePolicy= disabled on kernel command line.\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1470] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1475] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1481] device (podman1): Activation: starting connection 
'podman1' (7024fc22-fe75-4cea-afb7-75608193f035)\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1484] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1486] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external')\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1488] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external')\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1531] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nMar 07 11:46:06 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2177.\nMar 07 11:46:06 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2177.\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1973] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1976] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1982] device (podman1): Activation: successful, device activated.\nMar 07 11:46:06 managed-node2 systemd[1]: Started run-p33482-i33483.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit run-p33482-i33483.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit run-p33482-i33483.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2256.\nMar 07 11:46:06 managed-node2 aardvark-dns[33482]: starting aardvark on a child with pid 33489\nMar 07 11:46:06 managed-node2 aardvark-dns[33489]: Successfully parsed config\nMar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v4 ip {\"podman-default-kube-network\": [10.89.0.1]}\nMar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v6 ip {}\nMar 07 11:46:06 managed-node2 aardvark-dns[33489]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53]\nMar 07 11:46:06 managed-node2 systemd[1]: Started libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2262.\nMar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}\nMar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : terminal_ctrl_fd: 12\nMar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : winsz read side: 16, winsz write side: 17\nMar 07 11:46:06 managed-node2 systemd[1]: Started libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2269.\nMar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : container PID: 33495\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.278669784 -0500 EST m=+0.285382817 container init ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58)\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.281608185 -0500 EST m=+0.288321275 container start ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58)\nMar 07 11:46:06 managed-node2 kernel: catatonit[33495]: segfault at a9b80 ip 00007f4ad6a29dbb sp 00007ffcbefcdc50 error 4 in catatonit[4dbb,7f4ad6a26000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:46:06 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:46:06 managed-node2 systemd-coredump[33497]: Process 33495 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:46:06 managed-node2 systemd[1]: Started systemd-coredump@11-33497-0.service - Process Core Dump (PID 33497/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@11-33497-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@11-33497-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2276.\nMar 07 11:46:06 managed-node2 systemd[1]: Started libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 
2285.\nMar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/11/attach}\nMar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : terminal_ctrl_fd: 11\nMar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : winsz read side: 15, winsz write side: 16\nMar 07 11:46:06 managed-node2 systemd[1]: Started libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2292.\nMar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : container PID: 33503\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.352403787 -0500 EST m=+0.359116948 container init 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.357465195 -0500 EST m=+0.364178282 container start 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test)\nMar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.361993922 -0500 EST m=+0.368706973 pod start 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2)\nMar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nMar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58\n Container:\n 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\n \nMar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n 
time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using transient store: false\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Initializing event backend journald\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d bridge podman1 2026-03-07 11:44:18.828483768 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug 
msg=\"Successfully loaded 2 networks\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"setting container name 09b7f33e3afd-infra\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Allocated lock 1 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are supported\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created container \\\"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Container \\\"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Container \\\"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\\\" has run directory \\\"/run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n 
time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n 
time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"adding container to pod httpd2\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"setting container name httpd2-httpd2\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Allocated lock 2 for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created container \\\"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Container \\\"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Container \\\"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\\\" has run directory \\\"/run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Strongconnecting node ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Pushed ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b onto stack\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Finishing node ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b. Popped ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b off stack\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Strongconnecting node 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Pushed 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 onto stack\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Finishing node 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08. 
Popped 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 off stack\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Made network namespace at /run/netns/netns-57a5144e-40ac-4a85-01ac-9226ddb3e6f8 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created root filesystem for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b at /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/rootfs/merge\"\n [DEBUG netavark::network::validation] Validating network namespace...\n [DEBUG netavark::commands::setup] Setting up...\n [INFO netavark::firewall] Using nftables firewall driver\n [DEBUG netavark::network::bridge] Setup network podman-default-kube-network\n [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24]\n [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24]\n [DEBUG netavark::network::bridge] Using mtu 9001 from default route interface for the network\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/ip_forward to 1\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/route_localnet to 1\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/rp_filter to 2\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv6/conf/eth0/autoconf to 0\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/arp_notify to 1\n [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/rp_filter to 2\n [INFO netavark::network::netlink_route] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100)\n [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24\n [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source\n [INFO netavark::firewall::nft] Creating container chain nv_52ab27bf_10_89_0_0_nm24\n [DEBUG netavark::dns::aardvark] Spawning aardvark server\n [DEBUG netavark::dns::aardvark] start aardvark-dns: [\"systemd-run\", \"-q\", \"--scope\", \"/usr/libexec/podman/aardvark-dns\", \"--config\", \"/run/containers/networks/aardvark-dns\", \"-p\", \"53\", \"run\"]\n [DEBUG netavark::commands::setup] {\n \"podman-default-kube-network\": StatusBlock {\n dns_search_domains: Some(\n [\n \"dns.podman\",\n ],\n ),\n dns_server_ips: Some(\n [\n 10.89.0.1,\n ],\n ),\n interfaces: Some(\n {\n \"eth0\": NetInterface {\n mac_address: \"ee:98:79:da:ba:e0\",\n subnets: Some(\n [\n NetAddress {\n gateway: Some(\n 10.89.0.1,\n ),\n ipnet: 10.89.0.2/24,\n },\n ],\n ),\n },\n },\n ),\n },\n }\n [DEBUG netavark::commands::setup] Setup complete\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Setting Cgroups for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b to machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice:libpod:ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path 
\\\"/var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/rootfs/merge\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created OCI spec for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b at /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/config.json\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b -u ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata -p /run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/pidfile -n 09b7f33e3afd-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b]\"\n time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice and unitName libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope\"\n 
time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Received: 33495\"\n time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Got Conmon PID as 33493\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b in OCI runtime\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Starting container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b with command [/catatonit -P]\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Started container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/MVNQJ7CO6BHDSUCUPVG3N5YCHU,upperdir=/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/diff,workdir=/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c91,c172\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Mounted container \\\"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\\\" at \\\"/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/merged\\\"\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created root filesystem for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 at /var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/merged\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Setting Cgroups for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 to machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice:libpod:9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created OCI spec for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 at /var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/config.json\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"/usr/bin/conmon 
messages will be logged to syslog\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 -u 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata -p /run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08]\"\n time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice and unitName libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Received: 33503\"\n time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Got Conmon PID as 33500\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Created container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 in OCI runtime\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Starting container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Started container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Shutting down engines\"\n time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=33436\nMar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nMar 07 11:46:06 managed-node2 
systemd-coredump[33499]: Process 33495 (catatonit) of user 0 dumped core.\n \n Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 1:\n #0 0x00007f4ad6a29dbb n/a (/catatonit + 0x4dbb)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 33495 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 33495 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:46:06 managed-node2 systemd[1]: libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has successfully entered the 'dead' state.\nMar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : container 33495 exited with status 139\nMar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice/libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope/container/memory.events\nMar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : Cgroup appears to have been removed, stopping OOM monitoring\nMar 07 11:46:06 managed-node2 systemd[1]: systemd-coredump@11-33497-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@11-33497-0.service has successfully entered the 'dead' state.\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b)\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Setting parallel job count to 7\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Using sqlite as database backend\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using graph driver overlay\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: 
time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using run root /run/containers/storage\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using transient store: false\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Initializing event backend journald\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization 
failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nMar 07 11:46:06 managed-node2 podman[33506]: 2026-03-07 11:46:06.442858751 -0500 EST m=+0.031283817 container died ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra)\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Cleaning up container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Tearing down network namespace at /run/netns/netns-57a5144e-40ac-4a85-01ac-9226ddb3e6f8 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d bridge podman1 2026-03-07 11:44:18.828483768 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Successfully loaded 2 networks\"\nMar 07 11:46:06 managed-node2 aardvark-dns[33489]: Received SIGHUP\nMar 07 11:46:06 managed-node2 aardvark-dns[33489]: Successfully parsed config\nMar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v4 ip {}\nMar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v6 ip {}\nMar 07 11:46:06 managed-node2 aardvark-dns[33489]: No configuration found stopping the sever\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"netavark: [DEBUG netavark::commands::teardown] Tearing down..\\n\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"netavark: [INFO netavark::firewall] Using nftables firewall driver\\n\"\nMar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:06 managed-node2 systemd[1]: run-p33482-i33483.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p33482-i33483.scope has successfully entered the 'dead' state.\nMar 07 11:46:06 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nMar 07 11:46:06 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nMar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 
07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"netavark: [INFO netavark::network::bridge] removing bridge podman1\\n\"\nMar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.4783] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"INPUT\\\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"ip\\\", field: \\\"saddr\\\" }))), right: Named(Prefix(Prefix { addr: String(\\\"10.89.0.0\\\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\\\"tcp\\\")), Element(String(\\\"udp\\\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"th\\\", field: \\\"dport\\\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"FORWARD\\\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"ip\\\", field: \\\"daddr\\\" }))), right: Named(Prefix(Prefix { addr: String(\\\"10.89.0.0\\\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \\\"state\\\", family: None, dir: None })), right: List([String(\\\"established\\\"), String(\\\"related\\\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"FORWARD\\\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"ip\\\", field: \\\"saddr\\\" }))), right: Named(Prefix(Prefix { addr: String(\\\"10.89.0.0\\\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"POSTROUTING\\\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \\\"ip\\\", field: \\\"saddr\\\" }))), right: Named(Prefix(Prefix { addr: String(\\\"10.89.0.0\\\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \\\"nv_52ab27bf_10_89_0_0_nm24\\\" })], handle: Some(26), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Removing 4 rules\\n[DEBUG netavark::firewall::nft] Found chain nv_52ab27bf_10_89_0_0_nm24\\n\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"netavark: [DEBUG netavark::firewall::firewalld] Removing firewalld rules for IPs 10.89.0.0/24\\n\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \\\"netavark\\\", chain: \\\"NETAVARK-ISOLATION-3\\\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\\\"podman1\\\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\\n\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"netavark: [DEBUG netavark::firewall::nft] Found chain nv_52ab27bf_10_89_0_0_nm24_dnat\\n[DEBUG netavark::firewall::nft] Found chain 
nv_52ab27bf_10_89_0_0_nm24_dnat\\n\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"netavark: [DEBUG netavark::commands::teardown] Teardown complete\\n\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Successfully cleaned up container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"\nMar 07 11:46:06 managed-node2 podman[33506]: 2026-03-07 11:46:06.54231054 -0500 EST m=+0.130735647 container cleanup ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58)\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b)\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=debug msg=\"Shutting down engines\"\nMar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time=\"2026-03-07T11:46:06-05:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=33506\nMar 07 11:46:06 managed-node2 systemd[1]: libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has successfully entered the 'dead' state.\nMar 07 11:46:06 managed-node2 python3.12[33684]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nMar 07 11:46:06 managed-node2 systemd[1]: Reload requested from client PID 33685 ('systemctl') (unit session-7.scope)...\nMar 07 11:46:06 managed-node2 systemd[1]: Reloading...\nMar 07 11:46:07 managed-node2 systemd-rc-local-generator[33727]: /etc/rc.d/rc.local is not marked executable, skipping.\nMar 07 11:46:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-rootfs-merge.mount has successfully entered the 'dead' state.\nMar 07 11:46:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 
\n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-userdata-shm.mount has successfully entered the 'dead' state.\nMar 07 11:46:07 managed-node2 systemd[1]: run-netns-netns\\x2d57a5144e\\x2d40ac\\x2d4a85\\x2d01ac\\x2d9226ddb3e6f8.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d57a5144e\\x2d40ac\\x2d4a85\\x2d01ac\\x2d9226ddb3e6f8.mount has successfully entered the 'dead' state.\nMar 07 11:46:07 managed-node2 systemd[1]: Reloading finished in 222 ms.\nMar 07 11:46:07 managed-node2 python3.12[33906]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None\nMar 07 11:46:07 managed-node2 systemd[1]: Reload requested from client PID 33909 ('systemctl') (unit session-7.scope)...\nMar 07 11:46:07 managed-node2 systemd[1]: Reloading...\nMar 07 11:46:07 managed-node2 systemd-rc-local-generator[33960]: /etc/rc.d/rc.local is not marked executable, skipping.\nMar 07 11:46:07 managed-node2 systemd[1]: Reloading finished in 214 ms.\nMar 07 11:46:08 managed-node2 python3.12[34130]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nMar 07 11:46:08 managed-node2 systemd[1]: Created slice system-podman\\x2dkube.slice - Slice /system/podman-kube.\n\u2591\u2591 Subject: A start job for unit system-podman\\x2dkube.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit system-podman\\x2dkube.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2377.\nMar 07 11:46:08 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2299.\nMar 07 11:46:08 managed-node2 podman[34134]: 2026-03-07 11:46:08.575544958 -0500 EST m=+0.022390462 pod stop 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2)\nMar 07 11:46:10 managed-node2 podman[31698]: time=\"2026-03-07T11:46:10-05:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL\"\nMar 07 11:46:10 managed-node2 systemd[29271]: Removed slice user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice - cgroup user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit 
UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 133 and the job result is done.\nMar 07 11:46:10 managed-node2 podman[31698]: Pods stopped:\nMar 07 11:46:10 managed-node2 podman[31698]: 02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a\nMar 07 11:46:10 managed-node2 podman[31698]: Pods removed:\nMar 07 11:46:10 managed-node2 podman[31698]: 02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a\nMar 07 11:46:10 managed-node2 podman[31698]: Secrets removed:\nMar 07 11:46:10 managed-node2 podman[31698]: Volumes removed:\nMar 07 11:46:10 managed-node2 systemd[29271]: podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service: Consumed 753ms CPU time, 80.9M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT completed and consumed the indicated resources.\nMar 07 11:46:16 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nMar 07 11:46:18 managed-node2 podman[34134]: time=\"2026-03-07T11:46:18-05:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL\"\nMar 07 11:46:18 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : container 33503 exited with status 137\nMar 07 11:46:18 managed-node2 systemd[1]: libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has successfully entered the 'dead' state.\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.605105738 -0500 EST m=+10.051951370 container died 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z)\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08)\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=info msg=\"Setting parallel job count to 7\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: 
time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=info msg=\"Using sqlite as database backend\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Using graph driver overlay\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Using run root /run/containers/storage\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Using transient store: false\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Initializing event backend journald\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid 
executable found for OCI runtime runc: invalid argument\"\nMar 07 11:46:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054-merged.mount has successfully entered the 'dead' state.\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.639159774 -0500 EST m=+10.086005199 container cleanup 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z)\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=info msg=\"Received shutdown signal \\\"terminated\\\", terminating!\" PID=34157\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=info msg=\"Invoking shutdown handler \\\"libpod\\\"\" PID=34157\nMar 07 11:46:18 managed-node2 systemd[1]: Stopping libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope...\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2385.\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir 
--network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08)\"\nMar 07 11:46:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time=\"2026-03-07T11:46:18-05:00\" level=debug msg=\"Completed shutdown handler \\\"libpod\\\", duration 0s\" PID=34157\nMar 07 11:46:18 managed-node2 systemd[1]: libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has successfully entered the 'dead' state.\nMar 07 11:46:18 managed-node2 systemd[1]: Stopped libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope.\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2385 and the job result is done.\nMar 07 11:46:18 managed-node2 systemd[1]: Removed slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice - cgroup machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2384 and the job result is done.\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.687385492 -0500 EST m=+10.134230912 pod stop 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2)\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.7198332 -0500 EST m=+10.166678633 container remove 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.74111814 -0500 EST m=+10.187963572 container remove ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, 
name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58)\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.748131526 -0500 EST m=+10.194976927 pod remove 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2)\nMar 07 11:46:18 managed-node2 podman[34134]: Pods stopped:\nMar 07 11:46:18 managed-node2 podman[34134]: 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58\nMar 07 11:46:18 managed-node2 podman[34134]: Pods removed:\nMar 07 11:46:18 managed-node2 podman[34134]: 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58\nMar 07 11:46:18 managed-node2 podman[34134]: Secrets removed:\nMar 07 11:46:18 managed-node2 podman[34134]: Volumes removed:\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.7485712 -0500 EST m=+10.195416609 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge)\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.765571666 -0500 EST m=+10.212417099 container create f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice - cgroup machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2388.\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.805707536 -0500 EST m=+10.252552936 container create 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.809765847 -0500 EST m=+10.256611247 pod create 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2)\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.811528719 -0500 EST m=+10.258374297 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.833756495 -0500 EST m=+10.280601983 container create 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.834103154 -0500 EST m=+10.280948599 container restart 
f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:18 managed-node2 systemd[1]: Started libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2394.\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.897730576 -0500 EST m=+10.344576055 container init f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.900084574 -0500 EST m=+10.346930106 container start f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:18 managed-node2 kernel: catatonit[34171]: segfault at a9b80 ip 00007ff9206acdbb sp 00007ffc502fe200 error 4 in catatonit[4dbb,7ff9206a9000+77000] likely on CPU 0 (core 0, socket 0)\nMar 07 11:46:18 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:46:18 managed-node2 systemd-coredump[34173]: Process 34171 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:18 managed-node2 systemd[1]: Started systemd-coredump@12-34173-0.service - Process Core Dump (PID 34173/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@12-34173-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@12-34173-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2400.\nMar 07 11:46:18 managed-node2 kernel: veth0: entered allmulticast mode\nMar 07 11:46:18 managed-node2 kernel: veth0: entered promiscuous mode\nMar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nMar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9395] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5)\nMar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nMar 07 11:46:18 managed-node2 (udev-worker)[34175]: Network interface NamePolicy= disabled on kernel command line.\nMar 07 11:46:18 managed-node2 
(udev-worker)[34176]: Network interface NamePolicy= disabled on kernel command line.\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9459] device (podman1): carrier: link connected\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9484] device (veth0): carrier: link connected\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9487] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6)\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9699] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9734] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9762] device (podman1): Activation: starting connection 'podman1' (a476fb3c-4953-4a99-8c37-c91c928220c1)\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9763] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9766] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external')\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9767] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external')\nMar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9770] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nMar 07 11:46:18 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2409.\nMar 07 11:46:19 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2409.\nMar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.0167] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')\nMar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.0175] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nMar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.0184] device (podman1): Activation: successful, device activated.\nMar 07 11:46:19 managed-node2 systemd[1]: Started run-p34216-i34217.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit run-p34216-i34217.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit run-p34216-i34217.scope has finished 
successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2488.\nMar 07 11:46:19 managed-node2 systemd-coredump[34177]: Process 34171 (catatonit) of user 0 dumped core.\n \n Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 1:\n #0 0x00007ff9206acdbb n/a (/catatonit + 0x4dbb)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 34171 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 34171 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:46:19 managed-node2 systemd[1]: libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope has successfully entered the 'dead' state.\nMar 07 11:46:19 managed-node2 conmon[34169]: conmon f021e1ac269371a1a5c6 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope/container/memory.events\nMar 07 11:46:19 managed-node2 systemd[1]: systemd-coredump@12-34173-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@12-34173-0.service has successfully entered the 'dead' state.\nMar 07 11:46:19 managed-node2 podman[34226]: 2026-03-07 11:46:19.118293703 -0500 EST m=+0.022854670 container died f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:19 managed-node2 systemd[1]: Started libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2494.\nMar 07 11:46:19 managed-node2 podman[34226]: 2026-03-07 11:46:19.144694335 -0500 EST m=+0.049255264 container cleanup f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.149467711 -0500 EST m=+10.596313208 container init 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.1518268 -0500 EST m=+10.598672204 container start 
44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:19 managed-node2 kernel: catatonit[34227]: segfault at a9b80 ip 00007f45bdca9dbb sp 00007fff72636820 error 4 in catatonit[4dbb,7f45bdca6000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:46:19 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:46:19 managed-node2 systemd-coredump[34240]: Process 34227 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:46:19 managed-node2 systemd[1]: Started systemd-coredump@13-34240-0.service - Process Core Dump (PID 34240/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@13-34240-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@13-34240-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2501.\nMar 07 11:46:19 managed-node2 systemd[1]: Started libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2510.\nMar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.197461095 -0500 EST m=+10.644306639 container init 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nMar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.200754871 -0500 EST m=+10.647600358 container start 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test)\nMar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.205111382 -0500 EST m=+10.651956899 pod start 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2)\nMar 07 11:46:19 managed-node2 podman[34134]: Pod:\nMar 07 11:46:19 managed-node2 podman[34134]: 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409\nMar 07 11:46:19 managed-node2 podman[34134]: Container:\nMar 07 11:46:19 managed-node2 podman[34134]: 
1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75\nMar 07 11:46:19 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range\nMar 07 11:46:19 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2299.\nMar 07 11:46:19 managed-node2 podman[34248]: 2026-03-07 11:46:19.270404055 -0500 EST m=+0.035233803 pod stop 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2)\nMar 07 11:46:19 managed-node2 systemd-coredump[34241]: Process 34227 (catatonit) of user 0 dumped core.\n \n Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 1:\n #0 0x00007f45bdca9dbb n/a (/catatonit + 0x4dbb)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 34227 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 34227 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:46:19 managed-node2 systemd[1]: libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope has successfully entered the 'dead' state.\nMar 07 11:46:19 managed-node2 systemd[1]: systemd-coredump@13-34240-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@13-34240-0.service has successfully entered the 'dead' state.\nMar 07 11:46:19 managed-node2 podman[34283]: 2026-03-07 11:46:19.327174521 -0500 EST m=+0.016512016 container died 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:19 managed-node2 systemd[1]: run-p34216-i34217.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p34216-i34217.scope has successfully entered the 'dead' state.\nMar 07 11:46:19 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:19 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nMar 07 11:46:19 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nMar 07 11:46:19 managed-node2 kernel: podman1: port 
1(veth0) entered disabled state\nMar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.3664] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nMar 07 11:46:19 managed-node2 podman[34283]: 2026-03-07 11:46:19.426367765 -0500 EST m=+0.115705152 container cleanup 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-rootfs-merge.mount has successfully entered the 'dead' state.\nMar 07 11:46:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-userdata-shm.mount has successfully entered the 'dead' state.\nMar 07 11:46:20 managed-node2 python3.12[34439]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nMar 07 11:46:21 managed-node2 python3.12[34596]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:46:21 managed-node2 python3.12[34752]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:46:22 managed-node2 python3.12[34907]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:46:23 managed-node2 podman[35085]: 2026-03-07 11:46:23.121556991 -0500 EST m=+0.299999620 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nMar 07 11:46:23 managed-node2 python3.12[35275]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1\nMar 07 11:46:24 managed-node2 python3.12[35430]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:46:24 managed-node2 python3.12[35585]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nMar 07 11:46:24 managed-node2 python3.12[35710]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901984.2287433-15536-81376403194330/.source.yml _original_basename=.8zbijrin follow=False checksum=4ea4a304b347a6aaa397596e57cb6db94ea16b46 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nMar 07 11:46:25 managed-node2 python3.12[35865]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.275495475 -0500 EST m=+0.013172190 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge)\nMar 07 11:46:25 managed-node2 systemd[1]: Created slice machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice - cgroup machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2517.\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.311817507 -0500 EST m=+0.049494315 container create 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d)\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.316459547 -0500 EST m=+0.054136253 pod create 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3)\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.342690191 -0500 EST m=+0.080367000 container create fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, 
pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3550] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/7)\nMar 07 11:46:25 managed-node2 (udev-worker)[35883]: Network interface NamePolicy= disabled on kernel command line.\nMar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.318601722 -0500 EST m=+0.056278508 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nMar 07 11:46:25 managed-node2 kernel: veth0: entered allmulticast mode\nMar 07 11:46:25 managed-node2 kernel: veth0: entered promiscuous mode\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3822] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/8)\nMar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nMar 07 11:46:25 managed-node2 (udev-worker)[35885]: Network interface NamePolicy= disabled on kernel command line.\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3879] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3889] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3899] device (podman1): Activation: starting connection 'podman1' (6d8bf7d8-ae00-4628-b267-3ec15c9e992b)\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3904] device (veth0): carrier: link connected\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3907] device (podman1): carrier: link connected\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3909] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3915] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external')\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3918] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external')\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3922] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3944] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3948] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3952] device (podman1): Activation: successful, device activated.\nMar 07 11:46:25 managed-node2 systemd[1]: Started run-p35920-i35921.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for 
unit run-p35920-i35921.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit run-p35920-i35921.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2523.\nMar 07 11:46:25 managed-node2 systemd[1]: Started libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2529.\nMar 07 11:46:25 managed-node2 systemd[1]: Started libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2536.\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.521896266 -0500 EST m=+0.259573093 container init 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d)\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.524546913 -0500 EST m=+0.262223671 container start 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d)\nMar 07 11:46:25 managed-node2 kernel: catatonit[35928]: segfault at a9b80 ip 00007f6f72c16dbb sp 00007ffefa9d1c40 error 4 in catatonit[4dbb,7f6f72c13000+77000] likely on CPU 0 (core 0, socket 0)\nMar 07 11:46:25 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:46:25 managed-node2 systemd-coredump[35930]: Process 35928 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:46:25 managed-node2 systemd[1]: Started systemd-coredump@14-35930-0.service - Process Core Dump (PID 35930/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@14-35930-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@14-35930-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2543.\nMar 07 11:46:25 managed-node2 systemd[1]: Started libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2552.\nMar 07 11:46:25 managed-node2 systemd[1]: Started libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2559.\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.585157387 -0500 EST m=+0.322834314 container init fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.589721993 -0500 EST m=+0.327398858 container start fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nMar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.594062087 -0500 EST m=+0.331738925 pod start 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3)\nMar 07 11:46:25 managed-node2 systemd-coredump[35931]: Process 35928 (catatonit) of user 0 dumped core.\n \n Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 1:\n #0 0x00007f6f72c16dbb n/a (/catatonit + 0x4dbb)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 35928 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 35928 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:46:25 managed-node2 systemd[1]: libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has successfully entered the 'dead' state.\nMar 07 11:46:25 managed-node2 systemd[1]: systemd-coredump@14-35930-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@14-35930-0.service has successfully entered the 'dead' state.\nMar 07 11:46:25 managed-node2 podman[35941]: 2026-03-07 
11:46:25.677668355 -0500 EST m=+0.029940803 container died 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra)\nMar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:25 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nMar 07 11:46:25 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nMar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:25 managed-node2 systemd[1]: run-p35920-i35921.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p35920-i35921.scope has successfully entered the 'dead' state.\nMar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.7169] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nMar 07 11:46:25 managed-node2 systemd[1]: run-netns-netns\\x2d642376bf\\x2d1b5a\\x2dbb61\\x2d6a3b\\x2dfe70bdea2608.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d642376bf\\x2d1b5a\\x2dbb61\\x2d6a3b\\x2dfe70bdea2608.mount has successfully entered the 'dead' state.\nMar 07 11:46:25 managed-node2 podman[35941]: 2026-03-07 11:46:25.791383924 -0500 EST m=+0.143656388 container cleanup 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d)\nMar 07 11:46:25 managed-node2 systemd[1]: libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has successfully entered the 'dead' state.\nMar 07 11:46:26 managed-node2 python3.12[36118]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nMar 07 11:46:26 managed-node2 systemd[1]: Reload requested from client PID 36119 ('systemctl') (unit session-7.scope)...\nMar 07 11:46:26 managed-node2 systemd[1]: Reloading...\nMar 07 11:46:26 managed-node2 systemd-rc-local-generator[36161]: /etc/rc.d/rc.local is not marked executable, skipping.\nMar 07 11:46:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-userdata-shm.mount has successfully entered the 'dead' state.\nMar 07 11:46:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-rootfs-merge.mount has successfully entered the 'dead' state.\nMar 07 11:46:26 managed-node2 systemd[1]: Reloading finished in 222 ms.\nMar 07 11:46:27 managed-node2 python3.12[36340]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None\nMar 07 11:46:27 managed-node2 systemd[1]: Reload requested from client PID 36343 ('systemctl') (unit session-7.scope)...\nMar 07 11:46:27 managed-node2 systemd[1]: Reloading...\nMar 07 11:46:27 managed-node2 systemd-rc-local-generator[36385]: /etc/rc.d/rc.local is not marked executable, skipping.\nMar 07 11:46:27 managed-node2 systemd[1]: Reloading finished in 215 ms.\nMar 07 11:46:27 managed-node2 python3.12[36564]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nMar 07 11:46:27 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2566.\nMar 07 11:46:27 managed-node2 podman[36568]: 2026-03-07 11:46:27.898059361 -0500 EST m=+0.023361638 pod stop 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3)\nMar 07 11:46:29 managed-node2 podman[34248]: time=\"2026-03-07T11:46:29-05:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL\"\nMar 07 11:46:29 managed-node2 systemd[1]: libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope has successfully entered the 'dead' state.\nMar 07 11:46:29 managed-node2 conmon[34243]: conmon 1d68ffb6e0e9d84fa87d : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice/libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope/container/memory.events\nMar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.301508379 -0500 EST m=+10.066338217 container died 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:29 managed-node2 systemd[1]: 
var-lib-containers-storage-overlay-0c3bc18cb09098ff9f2d69e62d54abe819e567e28ca84575505d06b7f7092c88-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-0c3bc18cb09098ff9f2d69e62d54abe819e567e28ca84575505d06b7f7092c88-merged.mount has successfully entered the 'dead' state.\nMar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.336665522 -0500 EST m=+10.101495209 container cleanup 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nMar 07 11:46:29 managed-node2 systemd[1]: Removed slice machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice - cgroup machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2651 and the job result is done.\nMar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.366179073 -0500 EST m=+10.131008702 container remove 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nMar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.387294034 -0500 EST m=+10.152123658 container remove 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.394892423 -0500 EST m=+10.159722015 pod remove 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2)\nMar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.413822806 -0500 EST m=+10.178652430 container remove f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nMar 07 11:46:29 managed-node2 podman[34248]: time=\"2026-03-07T11:46:29-05:00\" level=error msg=\"Checking whether service of container f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a can be stopped: no container with ID f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a found in database: no such container\"\nMar 07 
11:46:29 managed-node2 podman[34248]: Pods stopped:\nMar 07 11:46:29 managed-node2 podman[34248]: 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409\nMar 07 11:46:29 managed-node2 podman[34248]: Pods removed:\nMar 07 11:46:29 managed-node2 podman[34248]: 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409\nMar 07 11:46:29 managed-node2 podman[34248]: Secrets removed:\nMar 07 11:46:29 managed-node2 podman[34248]: Volumes removed:\nMar 07 11:46:29 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state.\nMar 07 11:46:35 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files...\n\u2591\u2591 Subject: A start job for unit logrotate.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit logrotate.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2653.\nMar 07 11:46:35 managed-node2 systemd[1]: logrotate.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit logrotate.service has successfully entered the 'dead' state.\nMar 07 11:46:35 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files.\n\u2591\u2591 Subject: A start job for unit logrotate.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit logrotate.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2653.\nMar 07 11:46:35 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nMar 07 11:46:37 managed-node2 podman[36568]: time=\"2026-03-07T11:46:37-05:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL\"\nMar 07 11:46:37 managed-node2 systemd[1]: libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has successfully entered the 'dead' state.\nMar 07 11:46:37 managed-node2 podman[36568]: 2026-03-07 11:46:37.926712157 -0500 EST m=+10.052014514 container died fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nMar 07 11:46:37 managed-node2 systemd[1]: 
var-lib-containers-storage-overlay-ee6d4c1dab7f57321763e3e557cd90f2d6b0b9b7aeaf3ef8eab8ca49efa608d6-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-ee6d4c1dab7f57321763e3e557cd90f2d6b0b9b7aeaf3ef8eab8ca49efa608d6-merged.mount has successfully entered the 'dead' state.\nMar 07 11:46:37 managed-node2 podman[36568]: 2026-03-07 11:46:37.960570271 -0500 EST m=+10.085872435 container cleanup fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test)\nMar 07 11:46:37 managed-node2 systemd[1]: Stopping libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope...\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2732.\nMar 07 11:46:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:46:37 managed-node2 systemd[1]: libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has successfully entered the 'dead' state.\nMar 07 11:46:37 managed-node2 systemd[1]: Stopped libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope.\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2732 and the job result is done.\nMar 07 11:46:38 managed-node2 systemd[1]: Removed slice machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice - cgroup machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice 
has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2731 and the job result is done.\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.035367566 -0500 EST m=+10.160669765 container remove fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.056136818 -0500 EST m=+10.181439022 container remove 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.063189022 -0500 EST m=+10.188491194 pod remove 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3)\nMar 07 11:46:38 managed-node2 podman[36568]: Pods stopped:\nMar 07 11:46:38 managed-node2 podman[36568]: 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d\nMar 07 11:46:38 managed-node2 podman[36568]: Pods removed:\nMar 07 11:46:38 managed-node2 podman[36568]: 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d\nMar 07 11:46:38 managed-node2 podman[36568]: Secrets removed:\nMar 07 11:46:38 managed-node2 podman[36568]: Volumes removed:\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.063738172 -0500 EST m=+10.189040347 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.080611451 -0500 EST m=+10.205913648 container create 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 systemd[1]: Created slice machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice - cgroup machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2734.\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.119628454 -0500 EST m=+10.244930639 container create f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.12361059 -0500 EST m=+10.248912760 pod create 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.147717736 -0500 EST m=+10.273020009 container create e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda 
(image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.148072438 -0500 EST m=+10.273374646 container restart 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.125319335 -0500 EST m=+10.250621668 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nMar 07 11:46:38 managed-node2 systemd[1]: Started libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2740.\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.187397166 -0500 EST m=+10.312699428 container init 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.189764724 -0500 EST m=+10.315067060 container start 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 kernel: catatonit[36613]: segfault at a9b80 ip 00007f2b0a99fdbb sp 00007ffeeae7b9e0 error 4 in catatonit[4dbb,7f2b0a99c000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:46:38 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:46:38 managed-node2 systemd-coredump[36615]: Process 36613 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:46:38 managed-node2 systemd[1]: Started systemd-coredump@15-36615-0.service - Process Core Dump (PID 36615/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@15-36615-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@15-36615-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2746.\nMar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:38 managed-node2 kernel: veth0: entered allmulticast mode\nMar 07 11:46:38 managed-node2 kernel: veth0: entered promiscuous mode\nMar 07 11:46:38 managed-node2 
NetworkManager[807]: [1772901998.2247] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/9)\nMar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nMar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nMar 07 11:46:38 managed-node2 (udev-worker)[36618]: Network interface NamePolicy= disabled on kernel command line.\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2297] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10)\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2309] device (veth0): carrier: link connected\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2312] device (podman1): carrier: link connected\nMar 07 11:46:38 managed-node2 (udev-worker)[36619]: Network interface NamePolicy= disabled on kernel command line.\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2590] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2595] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2603] device (podman1): Activation: starting connection 'podman1' (2d400bb2-5548-4489-9134-38d8ab37ffca)\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2605] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2608] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external')\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2624] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external')\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2626] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nMar 07 11:46:38 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2755.\nMar 07 11:46:38 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2755.\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2978] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2981] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2986] device (podman1): Activation: successful, device activated.\nMar 07 11:46:38 
managed-node2 systemd[1]: Started run-p36659-i36660.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit run-p36659-i36660.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit run-p36659-i36660.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2834.\nMar 07 11:46:38 managed-node2 systemd-coredump[36617]: Process 36613 (catatonit) of user 0 dumped core.\n \n Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 1:\n #0 0x00007f2b0a99fdbb n/a (/catatonit + 0x4dbb)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 36613 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 36613 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:46:38 managed-node2 systemd[1]: Started libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2840.\nMar 07 11:46:38 managed-node2 systemd[1]: libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope has successfully entered the 'dead' state.\nMar 07 11:46:38 managed-node2 systemd[1]: systemd-coredump@15-36615-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@15-36615-0.service has successfully entered the 'dead' state.\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.385243424 -0500 EST m=+10.510545838 container init f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.389173233 -0500 EST m=+10.514475479 container start f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 kernel: catatonit[36666]: segfault at a9b80 ip 00007fbf485bcdbb sp 00007ffec6bbd630 error 4 in catatonit[4dbb,7fbf485b9000+77000] likely on 
CPU 1 (core 0, socket 0)\nMar 07 11:46:38 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:46:38 managed-node2 systemd-coredump[36678]: Process 36666 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:46:38 managed-node2 systemd[1]: Started systemd-coredump@16-36678-0.service - Process Core Dump (PID 36678/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@16-36678-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@16-36678-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2847.\nMar 07 11:46:38 managed-node2 podman[36668]: 2026-03-07 11:46:38.417671038 -0500 EST m=+0.036061473 container died 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 systemd[1]: Started libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2856.\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.454475365 -0500 EST m=+10.579777839 container init e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.458255485 -0500 EST m=+10.583557732 container start e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 podman[36668]: 2026-03-07 11:46:38.460012504 -0500 EST m=+0.078402768 container cleanup 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.467228715 -0500 EST m=+10.592531020 pod start 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3)\nMar 07 11:46:38 managed-node2 podman[36568]: Pod:\nMar 07 11:46:38 managed-node2 podman[36568]: 
139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a\nMar 07 11:46:38 managed-node2 podman[36568]: Container:\nMar 07 11:46:38 managed-node2 podman[36568]: e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda\nMar 07 11:46:38 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range\nMar 07 11:46:38 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2566.\nMar 07 11:46:38 managed-node2 systemd-coredump[36679]: Process 36666 (catatonit) of user 0 dumped core.\n \n Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64\n Stack trace of thread 1:\n #0 0x00007fbf485bcdbb n/a (/catatonit + 0x4dbb)\n ELF object binary architecture: AMD x86-64\n\u2591\u2591 Subject: Process 36666 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 36666 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:46:38 managed-node2 systemd[1]: libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope has successfully entered the 'dead' state.\nMar 07 11:46:38 managed-node2 conmon[36664]: conmon f4619e7c96f87ca49266 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice/libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope/container/memory.events\nMar 07 11:46:38 managed-node2 systemd[1]: systemd-coredump@16-36678-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@16-36678-0.service has successfully entered the 'dead' state.\nMar 07 11:46:38 managed-node2 podman[36689]: 2026-03-07 11:46:38.559767062 -0500 EST m=+0.051261217 pod stop 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3)\nMar 07 11:46:38 managed-node2 podman[36704]: 2026-03-07 11:46:38.587832463 -0500 EST m=+0.021982125 container died f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 systemd[1]: run-p36659-i36660.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 
Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p36659-i36660.scope has successfully entered the 'dead' state.\nMar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:38 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nMar 07 11:46:38 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nMar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nMar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.6196] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nMar 07 11:46:38 managed-node2 podman[36704]: 2026-03-07 11:46:38.676650677 -0500 EST m=+0.110800341 container cleanup f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-rootfs-merge.mount has successfully entered the 'dead' state.\nMar 07 11:46:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-userdata-shm.mount has successfully entered the 'dead' state.\nMar 07 11:46:39 managed-node2 sudo[36930]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bhiauhbpazjvrfebfoyyszwwljeltpxs ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901998.8167956-16021-135055247896982/AnsiballZ_command.py'\nMar 07 11:46:39 managed-node2 sudo[36930]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nMar 07 11:46:39 managed-node2 python3.12[36933]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:46:39 managed-node2 kernel: catatonit[36946]: segfault at a9b80 ip 00007f0938c79dbb sp 00007ffd32edf6e0 error 4 in catatonit[4dbb,7f0938c76000+77000] likely on CPU 1 (core 0, socket 0)\nMar 07 11:46:39 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43\nMar 07 11:46:39 managed-node2 systemd-coredump[36953]: Process 36946 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing...\nMar 07 11:46:39 managed-node2 systemd[1]: Started 
systemd-coredump@17-36953-0.service - Process Core Dump (PID 36953/UID 0).\n\u2591\u2591 Subject: A start job for unit systemd-coredump@17-36953-0.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit systemd-coredump@17-36953-0.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2863.\nMar 07 11:46:39 managed-node2 systemd[29271]: Started podman-36940.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 136.\nMar 07 11:46:39 managed-node2 systemd[29271]: Started podman-pause-7a6a691e.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 140.\nMar 07 11:46:39 managed-node2 systemd-coredump[36955]: Resource limits disable core dumping for process 36946 (catatonit).\nMar 07 11:46:39 managed-node2 systemd-coredump[36955]: Process 36946 (catatonit) of user 3001 terminated abnormally without generating a coredump.\n\u2591\u2591 Subject: Process 36946 (catatonit) dumped core\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: man:core(5)\n\u2591\u2591 \n\u2591\u2591 Process 36946 (catatonit) crashed and dumped core.\n\u2591\u2591 \n\u2591\u2591 This usually indicates a programming error in the crashing program and\n\u2591\u2591 should be reported to its vendor as a bug.\nMar 07 11:46:39 managed-node2 systemd[1]: systemd-coredump@17-36953-0.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit systemd-coredump@17-36953-0.service has successfully entered the 'dead' state.\nMar 07 11:46:39 managed-node2 sudo[36930]: pam_unix(sudo:session): session closed for user podman_basic_user\nMar 07 11:46:39 managed-node2 python3.12[37114]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:46:40 managed-node2 python3.12[37276]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nMar 07 11:46:48 managed-node2 podman[36689]: time=\"2026-03-07T11:46:48-05:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL\"\nMar 07 11:46:48 managed-node2 systemd[1]: libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope has successfully entered the 'dead' state.\nMar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.588635338 -0500 EST m=+10.080129492 container died e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nMar 07 11:46:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay-8566ab6694c8721216d4c8d23c7b9a958cf4d626dd678590315232721a4623bc-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-8566ab6694c8721216d4c8d23c7b9a958cf4d626dd678590315232721a4623bc-merged.mount has successfully entered the 'dead' state.\nMar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.624299419 -0500 EST m=+10.115793441 container cleanup e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nMar 07 11:46:48 managed-node2 systemd[1]: Removed slice machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice - cgroup machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2872 and the job result is done.\nMar 07 11:46:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:46:48 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nMar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.658377824 -0500 EST m=+10.149871873 container remove e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nMar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.679532751 -0500 EST m=+10.171026794 container remove f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.686508191 -0500 EST m=+10.178002210 pod remove 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3)\nMar 07 11:46:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nMar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.714211879 -0500 EST m=+10.205706029 container remove 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nMar 07 11:46:48 managed-node2 podman[36689]: Pods stopped:\nMar 07 11:46:48 managed-node2 podman[36689]: 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a\nMar 07 11:46:48 managed-node2 podman[36689]: Pods removed:\nMar 07 11:46:48 managed-node2 podman[36689]: 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a\nMar 07 11:46:48 managed-node2 podman[36689]: Secrets removed:\nMar 07 11:46:48 managed-node2 podman[36689]: Volumes removed:\nMar 07 11:46:48 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state.\nMar 07 11:46:49 managed-node2 python3.12[37452]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None", "task_name": "Dump journal", "task_path": "/tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:457" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Saturday 07 March 2026 11:46:50 -0500 (0:00:00.247) 0:03:35.737 ******** =============================================================================== fedora.linux_system_roles.podman : Ensure required packages are installed -- 33.09s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 fedora.linux_system_roles.podman : Start service ----------------------- 11.30s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:98 fedora.linux_system_roles.podman : Start service ----------------------- 11.30s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:98 
fedora.linux_system_roles.podman : Start service ----------------------- 11.22s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:98 Check if pods are running ---------------------------------------------- 10.02s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:302 fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 2.81s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 2.80s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 2.76s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 2.75s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 2.74s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 Create data files ------------------------------------------------------- 1.91s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:204 fedora.linux_system_roles.podman : Ensure container images are present --- 1.78s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_images.yml:2 fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.51s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:33 fedora.linux_system_roles.podman : Update containers/pods --------------- 1.47s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:63 Gathering Facts --------------------------------------------------------- 1.36s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:9 fedora.linux_system_roles.selinux : Set an SELinux label on a port ------ 1.28s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:134 fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 1.25s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:67 fedora.linux_system_roles.firewall : Install firewalld ------------------ 1.22s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 fedora.linux_system_roles.podman : Gather the package facts ------------- 1.14s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.podman : Update containers/pods --------------- 1.12s /tmp/collections-wfn/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:63 Mar 07 11:44:08 managed-node2 python3.12[15164]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True 
security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:08 managed-node2 dbus-broker-launch[739]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Mar 07 11:44:08 managed-node2 dbus-broker-launch[739]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Mar 07 11:44:08 managed-node2 systemd[1]: Started run-p15170-i15171.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p15170-i15171.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p15170-i15171.service has finished successfully. ░░ ░░ The job identifier is 1737. Mar 07 11:44:08 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1815. Mar 07 11:44:09 managed-node2 python3.12[15332]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:44:09 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Mar 07 11:44:09 managed-node2 systemd[1]: Finished man-db-cache-update.service. 
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1815. Mar 07 11:44:09 managed-node2 systemd[1]: run-p15170-i15171.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p15170-i15171.service has successfully entered the 'dead' state. Mar 07 11:44:10 managed-node2 python3.12[15520]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:44:11 managed-node2 python3.12[15676]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:44:12 managed-node2 kernel: SELinux: Converting 500 SID table entries... Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability open_perms=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability always_check_network=0 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Mar 07 11:44:12 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Mar 07 11:44:12 managed-node2 python3.12[15835]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:44:16 managed-node2 python3.12[15990]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:17 managed-node2 python3.12[16147]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:17 managed-node2 python3.12[16302]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:44:17 managed-node2 python3.12[16457]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:44:18 managed-node2 python3.12[16582]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901857.642029-10578-203368904599909/.source.yml _original_basename=.e_b4ms88 follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False 
content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:44:18 managed-node2 python3.12[16737]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:44:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat2437484652-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat2437484652-merged.mount has successfully entered the 'dead' state. Mar 07 11:44:18 managed-node2 kernel: evm: overlay not supported Mar 07 11:44:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck1662577311-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck1662577311-merged.mount has successfully entered the 'dead' state. Mar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.827309846 -0500 EST m=+0.070462949 system refresh Mar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.828665684 -0500 EST m=+0.071818888 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:44:18 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 1894. Mar 07 11:44:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice - cgroup machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice. ░░ Subject: A start job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished successfully. ░░ ░░ The job identifier is 1893. 
Mar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.87970198 -0500 EST m=+0.122855095 container create 246afbb22b17d10477ddd5d5c90f2d7d06c004c92b0b0defa7b3a3a43e4ecbe5 (image=, name=992c9586519a-infra, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09) Mar 07 11:44:18 managed-node2 podman[16744]: 2026-03-07 11:44:18.883851527 -0500 EST m=+0.127004609 pod create 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 (image=, name=nopull) Mar 07 11:44:19 managed-node2 podman[16744]: 2026-03-07 11:44:19.68863062 -0500 EST m=+0.931783734 container create d4fc0055deaed372cb505f1296fe8e33f059d4ac3adf6ad0c54243b547cbb4c4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z) Mar 07 11:44:19 managed-node2 podman[16744]: 2026-03-07 11:44:19.66621481 -0500 EST m=+0.909368011 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:44:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:44:22 managed-node2 python3.12[17081]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:44:22 managed-node2 python3.12[17242]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:24 managed-node2 python3.12[17399]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:25 managed-node2 python3.12[17555]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Mar 07 11:44:26 managed-node2 python3.12[17712]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Mar 07 11:44:27 managed-node2 python3.12[17869]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None 
description=None short=None Mar 07 11:44:28 managed-node2 python3.12[18024]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:29 managed-node2 python3.12[18180]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:30 managed-node2 python3.12[18336]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:31 managed-node2 python3.12[18492]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:44:32 managed-node2 python3.12[18676]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:44:33 managed-node2 python3.12[18831]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:44:36 managed-node2 python3.12[18986]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Mar 07 11:44:38 managed-node2 podman[19151]: 2026-03-07 11:44:38.376455047 -0500 EST m=+0.166868827 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest unable to copy from source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Mar 07 11:44:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:44:38 managed-node2 python3.12[19313]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:39 managed-node2 python3.12[19468]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:44:39 managed-node2 python3.12[19623]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:44:39 managed-node2 python3.12[19748]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901879.29425-11520-176634290947885/.source.yml _original_basename=.gt65xr01 follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:44:40 managed-node2 python3.12[19903]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.341496717 -0500 EST m=+0.013739324 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:44:40 managed-node2 systemd[1]: Created slice machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice - cgroup machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice. 
░░ Subject: A start job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished successfully. ░░ ░░ The job identifier is 1899. Mar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.376773579 -0500 EST m=+0.049016203 container create 1e62cdef56136721315b848a501f951852d27e5af5ee669a7ef1724aa57fbf3a (image=, name=a7c38d962220-infra, pod_id=a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0) Mar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.381063787 -0500 EST m=+0.053306319 pod create a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 (image=, name=bogus) Mar 07 11:44:40 managed-node2 podman[19910]: 2026-03-07 11:44:40.531228812 -0500 EST m=+0.203471494 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest unable to copy from source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Mar 07 11:44:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:44:42 managed-node2 python3.12[20227]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:44:43 managed-node2 python3.12[20389]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:45 managed-node2 python3.12[20546]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:46 managed-node2 python3.12[20702]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Mar 07 11:44:47 managed-node2 python3.12[20859]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Mar 07 11:44:48 managed-node2 python3.12[21016]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 
ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Mar 07 11:44:49 managed-node2 python3.12[21171]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:50 managed-node2 python3.12[21327]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:51 managed-node2 python3.12[21483]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:44:52 managed-node2 python3.12[21639]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:44:53 managed-node2 python3.12[21823]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:44:54 managed-node2 python3.12[21978]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:44:57 managed-node2 python3.12[22133]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:58 managed-node2 python3.12[22290]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:44:58 managed-node2 python3.12[22446]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Mar 07 11:44:59 
managed-node2 python3.12[22603]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:44:59 managed-node2 python3.12[22760]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:44:59 managed-node2 python3.12[22760]: ansible-containers.podman.podman_play version: 5.8.0, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Mar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.831024987 -0500 EST m=+0.021885022 pod stop 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 (image=, name=nopull) Mar 07 11:44:59 managed-node2 systemd[1]: Removed slice machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice - cgroup machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice. ░░ Subject: A stop job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09.slice has finished. ░░ ░░ The job identifier is 1905 and the job result is done. Mar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.862282915 -0500 EST m=+0.053142851 container remove d4fc0055deaed372cb505f1296fe8e33f059d4ac3adf6ad0c54243b547cbb4c4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.882439363 -0500 EST m=+0.073299300 container remove 246afbb22b17d10477ddd5d5c90f2d7d06c004c92b0b0defa7b3a3a43e4ecbe5 (image=, name=992c9586519a-infra, pod_id=992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09) Mar 07 11:44:59 managed-node2 podman[22767]: 2026-03-07 11:44:59.88990995 -0500 EST m=+0.080769856 pod remove 992c9586519a1ddf25a2c60d07146bc61654120f0d4d4ecca4f7f57e390a2f09 (image=, name=nopull) Mar 07 11:44:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Mar 07 11:45:00 managed-node2 python3.12[22931]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:00 managed-node2 python3.12[23086]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:00 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:45:03 managed-node2 python3.12[23403]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:03 managed-node2 python3.12[23564]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:05 managed-node2 python3.12[23721]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:06 managed-node2 python3.12[23877]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Mar 07 11:45:07 managed-node2 python3.12[24034]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Mar 07 11:45:08 managed-node2 python3.12[24191]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Mar 07 11:45:09 managed-node2 python3.12[24346]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ 
install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:10 managed-node2 python3.12[24502]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:11 managed-node2 python3.12[24658]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:11 managed-node2 python3.12[24814]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:45:13 managed-node2 python3.12[24998]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:45:13 managed-node2 python3.12[25153]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:45:17 managed-node2 python3.12[25308]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:18 managed-node2 python3.12[25465]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:19 managed-node2 python3.12[25622]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Mar 07 11:45:19 managed-node2 python3.12[25779]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:20 managed-node2 python3.12[25936]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None 
quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:45:20 managed-node2 python3.12[25936]: ansible-containers.podman.podman_play version: 5.8.0, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Mar 07 11:45:20 managed-node2 podman[25943]: 2026-03-07 11:45:20.102000803 -0500 EST m=+0.020362137 pod stop a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 (image=, name=bogus) Mar 07 11:45:20 managed-node2 systemd[1]: Removed slice machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice - cgroup machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice. ░░ Subject: A stop job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0.slice has finished. ░░ ░░ The job identifier is 1907 and the job result is done. Mar 07 11:45:20 managed-node2 podman[25943]: 2026-03-07 11:45:20.136901485 -0500 EST m=+0.055262816 container remove 1e62cdef56136721315b848a501f951852d27e5af5ee669a7ef1724aa57fbf3a (image=, name=a7c38d962220-infra, pod_id=a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0) Mar 07 11:45:20 managed-node2 podman[25943]: 2026-03-07 11:45:20.143954123 -0500 EST m=+0.062315426 pod remove a7c38d9622202bb8701cb54f964791645d3c651230d17d798b7fe42f0af594d0 (image=, name=bogus) Mar 07 11:45:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:45:20 managed-node2 python3.12[26107]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:20 managed-node2 python3.12[26262]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Mar 07 11:45:23 managed-node2 python3.12[26580]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:24 managed-node2 python3.12[26741]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:27 managed-node2 python3.12[26898]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:28 managed-node2 python3.12[27054]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Mar 07 11:45:28 managed-node2 python3.12[27211]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Mar 07 11:45:29 managed-node2 python3.12[27368]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] ipset_options={} protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Mar 07 11:45:30 managed-node2 python3.12[27523]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:31 managed-node2 python3.12[27679]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:32 managed-node2 python3.12[27835]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present 
allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Mar 07 11:45:33 managed-node2 python3.12[27991]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Mar 07 11:45:34 managed-node2 python3.12[28175]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Mar 07 11:45:35 managed-node2 python3.12[28330]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Mar 07 11:45:39 managed-node2 python3.12[28486]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Mar 07 11:45:39 managed-node2 python3.12[28642]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:39 managed-node2 python3.12[28799]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:40 managed-node2 python3.12[28955]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:41 managed-node2 python3.12[29111]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:45:41 managed-node2 python3.12[29267]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Mar 07 11:45:41 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 1987. Mar 07 11:45:41 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 1909. Mar 07 11:45:41 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. 
░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 1909. Mar 07 11:45:41 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 1989. Mar 07 11:45:41 managed-node2 systemd-logind[759]: New session 9 of user podman_basic_user. ░░ Subject: A new session 9 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 9 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 29271. Mar 07 11:45:41 managed-node2 (systemd)[29271]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Mar 07 11:45:41 managed-node2 systemd[29271]: Queued start job for default target default.target. Mar 07 11:45:41 managed-node2 systemd[29271]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Mar 07 11:45:41 managed-node2 systemd[29271]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Mar 07 11:45:41 managed-node2 systemd[29271]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Mar 07 11:45:41 managed-node2 systemd[29271]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 6. Mar 07 11:45:41 managed-node2 systemd[29271]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. 
░░ ░░ The job identifier is 12. Mar 07 11:45:41 managed-node2 systemd[29271]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Mar 07 11:45:41 managed-node2 systemd[29271]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Mar 07 11:45:41 managed-node2 systemd[29271]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Mar 07 11:45:41 managed-node2 systemd[29271]: Startup finished in 67ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 67799 microseconds. Mar 07 11:45:41 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 1989. 
Mar 07 11:45:42 managed-node2 python3.12[29442]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:42 managed-node2 python3.12[29597]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:43 managed-node2 sudo[29802]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhibmgzsvzhrmfekymwxsnjugtcfcwcf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901942.8189244-14095-168859344459604/AnsiballZ_podman_image.py' Mar 07 11:45:43 managed-node2 sudo[29802]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:43 managed-node2 kernel: catatonit[29817]: segfault at a9b80 ip 00007fdf17f46dbb sp 00007fffc0c93fa0 error 4 in catatonit[4dbb,7fdf17f43000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:45:43 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:43 managed-node2 systemd-coredump[29824]: Process 29817 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:43 managed-node2 systemd[1]: Created slice system-systemd\x2dcoredump.slice - Slice /system/systemd-coredump. ░░ Subject: A start job for unit system-systemd\x2dcoredump.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-systemd\x2dcoredump.slice has finished successfully. ░░ ░░ The job identifier is 2075. Mar 07 11:45:43 managed-node2 systemd[1]: Started systemd-coredump@0-29824-0.service - Process Core Dump (PID 29824/UID 0). ░░ Subject: A start job for unit systemd-coredump@0-29824-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@0-29824-0.service has finished successfully. ░░ ░░ The job identifier is 2070. Mar 07 11:45:43 managed-node2 systemd[29271]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Mar 07 11:45:43 managed-node2 systemd[29271]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Mar 07 11:45:43 managed-node2 systemd[29271]: Started dbus-broker.service - D-Bus User Message Bus. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Mar 07 11:45:43 managed-node2 dbus-broker-launch[29829]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Mar 07 11:45:43 managed-node2 dbus-broker-launch[29829]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Mar 07 11:45:43 managed-node2 systemd-coredump[29826]: Resource limits disable core dumping for process 29817 (catatonit). Mar 07 11:45:43 managed-node2 systemd-coredump[29826]: Process 29817 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29817 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29817 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:43 managed-node2 systemd[1]: systemd-coredump@0-29824-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@0-29824-0.service has successfully entered the 'dead' state. Mar 07 11:45:43 managed-node2 dbus-broker-launch[29829]: Ready Mar 07 11:45:43 managed-node2 systemd[29271]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Mar 07 11:45:43 managed-node2 systemd[29271]: Started podman-29812.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Mar 07 11:45:43 managed-node2 kernel: catatonit[29846]: segfault at a9b80 ip 00007ff7160afdbb sp 00007fff8cfc2e40 error 4 in catatonit[4dbb,7ff7160ac000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:45:43 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:43 managed-node2 systemd-coredump[29853]: Process 29846 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:43 managed-node2 systemd[1]: Started systemd-coredump@1-29853-0.service - Process Core Dump (PID 29853/UID 0). ░░ Subject: A start job for unit systemd-coredump@1-29853-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@1-29853-0.service has finished successfully. ░░ ░░ The job identifier is 2079. Mar 07 11:45:43 managed-node2 systemd[29271]: Started podman-29841.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Mar 07 11:45:43 managed-node2 systemd-coredump[29856]: Resource limits disable core dumping for process 29846 (catatonit). 
Mar 07 11:45:43 managed-node2 systemd-coredump[29856]: Process 29846 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29846 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29846 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:43 managed-node2 systemd[1]: systemd-coredump@1-29853-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@1-29853-0.service has successfully entered the 'dead' state. Mar 07 11:45:44 managed-node2 kernel: catatonit[29891]: segfault at a9b80 ip 00007fc382ae6dbb sp 00007fff833112d0 error 4 in catatonit[4dbb,7fc382ae3000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:44 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:44 managed-node2 systemd-coredump[29898]: Process 29891 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:44 managed-node2 systemd[1]: Started systemd-coredump@2-29898-0.service - Process Core Dump (PID 29898/UID 0). ░░ Subject: A start job for unit systemd-coredump@2-29898-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@2-29898-0.service has finished successfully. ░░ ░░ The job identifier is 2088. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29886.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-pause-320fc37c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. Mar 07 11:45:44 managed-node2 systemd-coredump[29900]: Resource limits disable core dumping for process 29891 (catatonit). Mar 07 11:45:44 managed-node2 systemd-coredump[29900]: Process 29891 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29891 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29891 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:44 managed-node2 systemd[1]: systemd-coredump@2-29898-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@2-29898-0.service has successfully entered the 'dead' state. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29905.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. 
░░ ░░ The job identifier is 35. Mar 07 11:45:44 managed-node2 kernel: catatonit[29927]: segfault at a9b80 ip 00007f18b4071dbb sp 00007fffc0fc07f0 error 4 in catatonit[4dbb,7f18b406e000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:44 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:44 managed-node2 systemd-coredump[29935]: Process 29927 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:44 managed-node2 systemd[1]: Started systemd-coredump@3-29935-0.service - Process Core Dump (PID 29935/UID 0). ░░ Subject: A start job for unit systemd-coredump@3-29935-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@3-29935-0.service has finished successfully. ░░ ░░ The job identifier is 2097. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29922.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Mar 07 11:45:44 managed-node2 systemd-coredump[29936]: Resource limits disable core dumping for process 29927 (catatonit). Mar 07 11:45:44 managed-node2 systemd-coredump[29936]: Process 29927 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29927 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29927 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:44 managed-node2 systemd[29271]: podman-pause-bf3bda8f.scope: Couldn't move process 29927 to requested cgroup '/user.slice/user-3001.slice/user@3001.service/user.slice/podman-pause-bf3bda8f.scope' (directly or via the system bus): No such process Mar 07 11:45:44 managed-node2 systemd[29271]: podman-pause-bf3bda8f.scope: Failed to add PIDs to scope's control group: Permission denied Mar 07 11:45:44 managed-node2 systemd[29271]: podman-pause-bf3bda8f.scope: Failed with result 'resources'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT has entered the 'failed' state with result 'resources'. Mar 07 11:45:44 managed-node2 systemd[1]: systemd-coredump@3-29935-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@3-29935-0.service has successfully entered the 'dead' state. Mar 07 11:45:44 managed-node2 systemd[29271]: Failed to start podman-pause-bf3bda8f.scope. ░░ Subject: A start job for unit UNIT has failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished with a failure. ░░ ░░ The job identifier is 43 and the job result is failed. 
Mar 07 11:45:44 managed-node2 kernel: catatonit[29953]: segfault at a9b80 ip 00007faeb6efadbb sp 00007ffc7421de50 error 4 in catatonit[4dbb,7faeb6ef7000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:44 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:44 managed-node2 systemd-coredump[29961]: Process 29953 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:44 managed-node2 systemd[1]: Started systemd-coredump@4-29961-0.service - Process Core Dump (PID 29961/UID 0). ░░ Subject: A start job for unit systemd-coredump@4-29961-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@4-29961-0.service has finished successfully. ░░ ░░ The job identifier is 2106. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-29948.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. Mar 07 11:45:44 managed-node2 systemd-coredump[29963]: Resource limits disable core dumping for process 29953 (catatonit). Mar 07 11:45:44 managed-node2 systemd-coredump[29963]: Process 29953 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 29953 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 29953 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:44 managed-node2 systemd[29271]: Started podman-pause-ed80bfc9.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Mar 07 11:45:44 managed-node2 systemd[1]: systemd-coredump@4-29961-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@4-29961-0.service has successfully entered the 'dead' state. 
Mar 07 11:45:44 managed-node2 sudo[29802]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:44 managed-node2 python3.12[30123]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:45:45 managed-node2 python3.12[30278]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:45 managed-node2 python3.12[30433]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:45:46 managed-node2 python3.12[30558]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901945.4660614-14184-34473031875573/.source.yml _original_basename=.8iqqb013 follow=False checksum=5a374c59230176d446e6cd38bcc64da326c45092 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:45:46 managed-node2 sudo[30763]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oerbzxzssklczavswqvqximbbvrtsmtu ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901946.1921344-14217-62861233194905/AnsiballZ_podman_play.py' Mar 07 11:45:46 managed-node2 sudo[30763]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:46 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:45:46 managed-node2 kernel: catatonit[30786]: segfault at a9b80 ip 00007f77afc58dbb sp 00007fff81ee37a0 error 4 in catatonit[4dbb,7f77afc55000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:46 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:46 managed-node2 systemd-coredump[30794]: Process 30786 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:46 managed-node2 systemd[1]: Started systemd-coredump@5-30794-0.service - Process Core Dump (PID 30794/UID 0). 
░░ Subject: A start job for unit systemd-coredump@5-30794-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@5-30794-0.service has finished successfully. ░░ ░░ The job identifier is 2115. Mar 07 11:45:46 managed-node2 systemd[29271]: Started podman-30780.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 55. Mar 07 11:45:46 managed-node2 systemd[29271]: Created slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice - cgroup user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 59. Mar 07 11:45:46 managed-node2 systemd-coredump[30796]: Resource limits disable core dumping for process 30786 (catatonit). Mar 07 11:45:46 managed-node2 systemd-coredump[30796]: Process 30786 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 30786 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 30786 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:46 managed-node2 systemd[1]: systemd-coredump@5-30794-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@5-30794-0.service has successfully entered the 'dead' state. Mar 07 11:45:46 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Mar 07 11:45:46 managed-node2 systemd[29271]: Started rootless-netns-8fa057a2.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 63. Mar 07 11:45:46 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Mar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:46 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:45:46 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:45:46 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:45:46 managed-node2 systemd[29271]: Started run-p30834-i30835.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 67. 
Mar 07 11:45:46 managed-node2 aardvark-dns[30834]: starting aardvark on a child with pid 30835 Mar 07 11:45:46 managed-node2 aardvark-dns[30835]: Successfully parsed config Mar 07 11:45:46 managed-node2 aardvark-dns[30835]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Mar 07 11:45:46 managed-node2 aardvark-dns[30835]: Listen v6 ip {} Mar 07 11:45:46 managed-node2 aardvark-dns[30835]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Mar 07 11:45:46 managed-node2 conmon[30852]: conmon a532637f985bd7708dd5 : failed to write to /proc/self/oom_score_adj: Permission denied Mar 07 11:45:46 managed-node2 systemd[29271]: Started libpod-conmon-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. Mar 07 11:45:46 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/15/attach} Mar 07 11:45:46 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : terminal_ctrl_fd: 15 Mar 07 11:45:46 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : winsz read side: 18, winsz write side: 19 Mar 07 11:45:47 managed-node2 systemd[29271]: Started libpod-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 76. Mar 07 11:45:47 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : container PID: 30855 Mar 07 11:45:47 managed-node2 kernel: catatonit[30855]: segfault at a9b80 ip 00007f9031569dbb sp 00007ffe6192e700 error 4 in catatonit[4dbb,7f9031566000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:47 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:47 managed-node2 systemd-coredump[30857]: Process 30855 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:47 managed-node2 systemd[1]: Started systemd-coredump@6-30857-0.service - Process Core Dump (PID 30857/UID 0). ░░ Subject: A start job for unit systemd-coredump@6-30857-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@6-30857-0.service has finished successfully. ░░ ░░ The job identifier is 2124. Mar 07 11:45:47 managed-node2 conmon[30859]: conmon ee4a1b77972d6a790be3 : failed to write to /proc/self/oom_score_adj: Permission denied Mar 07 11:45:47 managed-node2 systemd[29271]: Started libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 81. 
Mar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Mar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : terminal_ctrl_fd: 14 Mar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : winsz read side: 17, winsz write side: 18 Mar 07 11:45:47 managed-node2 systemd[29271]: Started libpod-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Mar 07 11:45:47 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : container PID: 30863 Mar 07 11:45:47 managed-node2 systemd-coredump[30858]: Resource limits disable core dumping for process 30855 (catatonit). Mar 07 11:45:47 managed-node2 systemd-coredump[30858]: Process 30855 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 30855 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 30855 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:47 managed-node2 systemd[1]: systemd-coredump@6-30857-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@6-30857-0.service has successfully entered the 'dead' state. Mar 07 11:45:47 managed-node2 conmon[30853]: conmon a532637f985bd7708dd5 : container 30855 exited with status 139 Mar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Mar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399 Container: ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 Mar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2026-03-07T11:45:46-05:00" level=info msg="/bin/podman filtering at log level debug" time="2026-03-07T11:45:46-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2026-03-07T11:45:46-05:00" level=info msg="Setting parallel job count to 7" time="2026-03-07T11:45:46-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2026-03-07T11:45:46-05:00" level=info msg="Using sqlite as database backend" time="2026-03-07T11:45:46-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph driver overlay" time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2026-03-07T11:45:46-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2026-03-07T11:45:46-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2026-03-07T11:45:46-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2026-03-07T11:45:46-05:00" level=debug msg="Using transient store: false" time="2026-03-07T11:45:46-05:00" level=debug msg="Not configuring container store" time="2026-03-07T11:45:46-05:00" level=debug msg="Initializing event backend file" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2026-03-07T11:45:46-05:00" level=info msg="Creating a new rootless user namespace" time="2026-03-07T11:45:46-05:00" level=info msg="/bin/podman filtering at log level debug" time="2026-03-07T11:45:46-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2026-03-07T11:45:46-05:00" level=info msg="Setting parallel job count to 7" time="2026-03-07T11:45:46-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2026-03-07T11:45:46-05:00" level=info msg="Using sqlite as database backend" time="2026-03-07T11:45:46-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph driver overlay" time="2026-03-07T11:45:46-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2026-03-07T11:45:46-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2026-03-07T11:45:46-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2026-03-07T11:45:46-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2026-03-07T11:45:46-05:00" level=debug msg="Using transient store: false" time="2026-03-07T11:45:46-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2026-03-07T11:45:46-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2026-03-07T11:45:46-05:00" level=debug msg="Initializing event backend file" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2026-03-07T11:45:46-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Successfully loaded 1 networks" time="2026-03-07T11:45:46-05:00" level=debug msg="found free device name podman1" time="2026-03-07T11:45:46-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2026-03-07T11:45:46-05:00" level=debug msg="Pod using bridge network mode" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup path 
user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string" time="2026-03-07T11:45:46-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:45:46-05:00" level=debug msg="setting container name 46ab0de2b796-infra" time="2026-03-07T11:45:46-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 196e978a743fccc03fb8ddd2d41a1f9a15d160f55231f9844a9070e6a9ce61ba bridge podman1 2026-03-07 11:45:46.711313386 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2026-03-07T11:45:46-05:00" level=debug msg="Successfully loaded 2 networks" time="2026-03-07T11:45:46-05:00" level=debug msg="Allocated lock 1 for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2026-03-07T11:45:46-05:00" level=debug msg="Check for idmapped mounts support " time="2026-03-07T11:45:46-05:00" level=debug msg="Created container \"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431\" has run directory \"/run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:45:46-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2026-03-07T11:45:46-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2026-03-07T11:45:46-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:45:46-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2026-03-07T11:45:46-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:45:46-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2026-03-07T11:45:46-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:45:46-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:45:46-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:45:46-05:00" level=debug msg="adding container to pod httpd1" time="2026-03-07T11:45:46-05:00" level=debug msg="setting container name httpd1-httpd1" time="2026-03-07T11:45:46-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:45:46-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /proc" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /dev" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /dev/pts" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /dev/mqueue" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /sys" time="2026-03-07T11:45:46-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2026-03-07T11:45:46-05:00" level=debug msg="Allocated lock 2 for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:46-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Created container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\" has run directory 
\"/run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Strongconnecting node a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="Pushed a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 onto stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Finishing node a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431. Popped a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 off stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Strongconnecting node ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:46-05:00" level=debug msg="Pushed ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 onto stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Finishing node ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431. Popped ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 off stack" time="2026-03-07T11:45:46-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-dcd66955-fe96-f197-416b-aad9b87d86cb for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="Created root filesystem for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/rootfs/merge" time="2026-03-07T11:45:46-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2026-03-07T11:45:46-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2026-03-07T11:45:46-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::bridge] Using mtu 65520 from default route interface for the network [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/ip_forward to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/route_localnet to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink_route] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_196e978a_10_89_0_0_nm24 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "d6:83:e5:9a:8f:77", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Starting parent driver\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport112898145/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport112898145/.bp.sock]\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport: time=\"2026-03-07T11:45:46-05:00\" level=info msg=Ready\n" time="2026-03-07T11:45:46-05:00" level=debug msg="rootlessport is ready" time="2026-03-07T11:45:46-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:45:46-05:00" level=debug msg="Setting Cgroups for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 to 
user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice:libpod:a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:46-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:45:46-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/rootfs/merge\"" time="2026-03-07T11:45:46-05:00" level=debug msg="Created OCI spec for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/config.json" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399" time="2026-03-07T11:45:46-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:46-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:45:46-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 -u a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata -p /run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/pidfile -n 46ab0de2b796-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg 
--storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431]" time="2026-03-07T11:45:46-05:00" level=info msg="Running conmon under slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice and unitName libpod-conmon-a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2026-03-07T11:45:47-05:00" level=debug msg="Received: 30855" time="2026-03-07T11:45:47-05:00" level=info msg="Got Conmon PID as 30853" time="2026-03-07T11:45:47-05:00" level=debug msg="Created container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 in OCI runtime" time="2026-03-07T11:45:47-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2026-03-07T11:45:47-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2026-03-07T11:45:47-05:00" level=debug msg="Starting container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431 with command [/catatonit -P]" time="2026-03-07T11:45:47-05:00" level=debug msg="Started container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" time="2026-03-07T11:45:47-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/JH6ZGMTVFTMNRQA4DOPAJEHHJA,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c60,c376\"" time="2026-03-07T11:45:47-05:00" level=debug msg="Mounted container \"ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/merged\"" time="2026-03-07T11:45:47-05:00" level=debug msg="Created root filesystem for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 at /home/podman_basic_user/.local/share/containers/storage/overlay/ad9a1c74a0c4038ac9a231e37e8758d88e5000d7bc2897b0ae664d4e57b3be58/merged" time="2026-03-07T11:45:47-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:45:47-05:00" level=debug msg="Setting Cgroups for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 to user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice:libpod:ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:47-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:45:47-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2026-03-07T11:45:47-05:00" level=debug msg="Created OCI spec for container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/config.json" 
time="2026-03-07T11:45:47-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice for parent user.slice and name libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399" time="2026-03-07T11:45:47-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:47-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice" time="2026-03-07T11:45:47-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:45:47-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 -u ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata -p /run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431]" time="2026-03-07T11:45:47-05:00" level=info msg="Running conmon under slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice and unitName libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2026-03-07T11:45:47-05:00" level=debug msg="Received: 30863" time="2026-03-07T11:45:47-05:00" level=info msg="Got Conmon PID as 30861" time="2026-03-07T11:45:47-05:00" level=debug 
msg="Created container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 in OCI runtime" time="2026-03-07T11:45:47-05:00" level=debug msg="Starting container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431 with command [/bin/busybox-extras httpd -f -p 80]" time="2026-03-07T11:45:47-05:00" level=debug msg="Started container ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431" time="2026-03-07T11:45:47-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2026-03-07T11:45:47-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=30780 time="2026-03-07T11:45:47-05:00" level=debug msg="Shutting down engines" Mar 07 11:45:47 managed-node2 python3.12[30766]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431)" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="Setting parallel job count to 7" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="Using sqlite as database backend" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using graph driver overlay" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using run root /run/user/3001/containers" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using transient store: false" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cached value indicated that native-diff is usable" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Initializing event backend file" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid 
argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Mar 07 11:45:47 managed-node2 sudo[30763]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cleaning up container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Tearing down network namespace at /run/user/3001/netns/netns-dcd66955-fe96-f197-416b-aad9b87d86cb for container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 196e978a743fccc03fb8ddd2d41a1f9a15d160f55231f9844a9070e6a9ce61ba bridge podman1 2026-03-07 11:45:46.711313386 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Successfully loaded 2 networks" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Tearing down..\n" Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: Received SIGHUP Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: Successfully parsed config Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: Listen v4 ip {} Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: Listen v6 ip {} Mar 07 11:45:47 managed-node2 aardvark-dns[30835]: No configuration found stopping the sever Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [INFO netavark::firewall] Using nftables firewall driver\n" Mar 07 11:45:47 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:47 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:45:47 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:45:47 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [INFO netavark::network::bridge] removing bridge podman1\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"INPUT\", 
expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\"tcp\")), Element(String(\"udp\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"th\", field: \"dport\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"daddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \"state\", family: None, dir: None })), right: List([String(\"established\"), String(\"related\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"POSTROUTING\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \"nv_196e978a_10_89_0_0_nm24\" })], handle: Some(26), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 4 rules\n[DEBUG netavark::firewall::nft] Found chain nv_196e978a_10_89_0_0_nm24\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"NETAVARK-ISOLATION-3\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\"podman1\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain nv_196e978a_10_89_0_0_nm24_dnat\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain nv_196e978a_10_89_0_0_nm24_dnat\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Teardown complete\n" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Cleaning up rootless network namespace" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Successfully cleaned up container a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers 
--log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a532637f985bd7708dd54d0ffcdc6be463d599c59420d1a9c767b64ad803f431)" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=debug msg="Shutting down engines" Mar 07 11:45:47 managed-node2 /usr/bin/podman[30868]: time="2026-03-07T11:45:47-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=30868 Mar 07 11:45:47 managed-node2 sudo[31091]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dogisxyjglvsrovdvtkjavoxxllbgpoh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901947.3123984-14267-185518984212991/AnsiballZ_systemd.py' Mar 07 11:45:47 managed-node2 sudo[31091]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:47 managed-node2 python3.12[31094]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Mar 07 11:45:47 managed-node2 systemd[29271]: Reload requested from client PID 31095 ('systemctl')... Mar 07 11:45:47 managed-node2 systemd[29271]: Reloading... Mar 07 11:45:47 managed-node2 systemd[29271]: Reloading finished in 42 ms. Mar 07 11:45:47 managed-node2 sudo[31091]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:48 managed-node2 sudo[31309]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rictksfalktnaehelhfnnbscqeskopdj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901947.9564757-14297-244281299619627/AnsiballZ_systemd.py' Mar 07 11:45:48 managed-node2 sudo[31309]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:48 managed-node2 python3.12[31312]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Mar 07 11:45:48 managed-node2 systemd[29271]: Reload requested from client PID 31315 ('systemctl')... Mar 07 11:45:48 managed-node2 systemd[29271]: Reloading... Mar 07 11:45:48 managed-node2 systemd[29271]: Reloading finished in 39 ms. 
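The two ansible-systemd calls above (daemon_reload=True, then enabled=True for the escaped unit name) correspond roughly to the following user-scope systemctl invocations; a sketch only, using the same become environment the log shows the role setting:

    # Sketch: user-scope equivalents of the ansible-systemd tasks logged above
    sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 \
        systemctl --user daemon-reload
    sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 \
        systemctl --user enable \
        'podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service'

The single quotes keep the \x2d escape literal, which is the form systemd expects for the template instance name.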
Mar 07 11:45:48 managed-node2 sudo[31309]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:48 managed-node2 sudo[31529]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nuvmtvywssfvscqhqidkfbcugrldioki ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901948.6670218-14329-160015858227475/AnsiballZ_systemd.py' Mar 07 11:45:48 managed-node2 sudo[31529]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:45:49 managed-node2 python3.12[31532]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Mar 07 11:45:49 managed-node2 systemd[29271]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 103. Mar 07 11:45:49 managed-node2 systemd[29271]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 91. Mar 07 11:45:49 managed-node2 kernel: catatonit[31544]: segfault at a9b80 ip 00007f1befc8ddbb sp 00007ffc46cfc240 error 4 in catatonit[4dbb,7f1befc8a000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:45:49 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:49 managed-node2 systemd-coredump[31550]: Process 31544 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:49 managed-node2 systemd[1]: Started systemd-coredump@7-31550-0.service - Process Core Dump (PID 31550/UID 0). ░░ Subject: A start job for unit systemd-coredump@7-31550-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@7-31550-0.service has finished successfully. ░░ ░░ The job identifier is 2135. Mar 07 11:45:49 managed-node2 systemd-coredump[31553]: Process 31544 (catatonit) of user 3001 dumped core. Module /usr/libexec/catatonit/catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 31544: #0 0x00007f1befc8ddbb __libc_setup_tls (/usr/libexec/catatonit/catatonit + 0x4dbb) #1 0x00007f1befc8da79 __libc_start_main_impl (/usr/libexec/catatonit/catatonit + 0x4a79) #2 0x00007f1befc8b4e5 _start (/usr/libexec/catatonit/catatonit + 0x24e5) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 31544 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 31544 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:49 managed-node2 systemd[1]: systemd-coredump@7-31550-0.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@7-31550-0.service has successfully entered the 'dead' state. Mar 07 11:45:59 managed-node2 podman[31542]: time="2026-03-07T11:45:59-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Mar 07 11:45:59 managed-node2 conmon[30861]: conmon ee4a1b77972d6a790be3 : container 30863 exited with status 137 Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431)" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=info msg="Setting parallel job count to 7" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=info msg="Using sqlite as database backend" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
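The catatonit segfault above (PID 31544, signal 11/SEGV) was captured by systemd-coredump, so the dump remains inspectable after the run; for example:

    # Sketch: inspecting the catatonit core dumps reported in this run
    coredumpctl list catatonit
    coredumpctl info 31544    # PID of the first dump above; substitute other PIDs as needed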
Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using graph driver overlay" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using run root /run/user/3001/containers" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using transient store: false" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Cached value indicated that native-diff is usable" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Initializing event backend file" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Mar 07 
11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431)" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=debug msg="Shutting down engines" Mar 07 11:45:59 managed-node2 /usr/bin/podman[31562]: time="2026-03-07T11:45:59-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=31562 Mar 07 11:45:59 managed-node2 systemd[29271]: Stopping libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 105. Mar 07 11:45:59 managed-node2 systemd[29271]: Stopped libpod-conmon-ee4a1b77972d6a790be318d37fd3135f592b5d5a84333863b597d3476d397431.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 105 and the job result is done. Mar 07 11:45:59 managed-node2 systemd[29271]: Removed slice user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice - cgroup user-libpod_pod_46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 104 and the job result is done. 
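At this point the first httpd1 pod has been stopped and its conmon scope and pod slice removed; whether anything is left behind for the rootless user can be checked directly, reusing the become pattern from the log (a sketch, not part of the role):

    # Sketch: listing what remains in the rootless user's podman after the teardown above
    sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 podman pod ps
    sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 podman ps --all --pod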
Mar 07 11:45:59 managed-node2 podman[31542]: Pods stopped: Mar 07 11:45:59 managed-node2 podman[31542]: 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399 Mar 07 11:45:59 managed-node2 podman[31542]: Pods removed: Mar 07 11:45:59 managed-node2 podman[31542]: 46ab0de2b796fee071be32508a55fb1fae0ad6110923b3ff549d47a55abe1399 Mar 07 11:45:59 managed-node2 podman[31542]: Secrets removed: Mar 07 11:45:59 managed-node2 podman[31542]: Volumes removed: Mar 07 11:45:59 managed-node2 systemd[29271]: Created slice user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice - cgroup user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Mar 07 11:45:59 managed-node2 systemd[29271]: Started libpod-879669150b8fd150356b3d47d7f340be20e76730c97501db0eb82939c5fc9bd1.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 111. Mar 07 11:45:59 managed-node2 kernel: catatonit[31575]: segfault at a9b80 ip 00007f139f341dbb sp 00007fff16d48850 error 4 in catatonit[4dbb,7f139f33e000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:59 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:59 managed-node2 systemd-coredump[31577]: Process 31575 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:59 managed-node2 systemd[1]: Started systemd-coredump@8-31577-0.service - Process Core Dump (PID 31577/UID 0). ░░ Subject: A start job for unit systemd-coredump@8-31577-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@8-31577-0.service has finished successfully. ░░ ░░ The job identifier is 2144. Mar 07 11:45:59 managed-node2 systemd[29271]: Started rootless-netns-461281de.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 115. Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:59 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:45:59 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:45:59 managed-node2 systemd-coredump[31579]: Process 31575 (catatonit) of user 3001 dumped core. 
Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f139f341dbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 31575 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 31575 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:59 managed-node2 systemd[1]: systemd-coredump@8-31577-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@8-31577-0.service has successfully entered the 'dead' state. Mar 07 11:45:59 managed-node2 conmon[31573]: conmon 879669150b8fd150356b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-3001.slice/user@3001.service/user.slice/libpod-879669150b8fd150356b3d47d7f340be20e76730c97501db0eb82939c5fc9bd1.scope/container/memory.events Mar 07 11:45:59 managed-node2 systemd[29271]: Started run-p31598-i31599.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 119. Mar 07 11:45:59 managed-node2 systemd[29271]: Started libpod-fa374669b33b370f44864c8cec6d40cc032a256cc26f6dca99aab760c7fd4b53.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 123. Mar 07 11:45:59 managed-node2 kernel: catatonit[31627]: segfault at a9b80 ip 00007fb82f5dbdbb sp 00007fff96168e50 error 4 in catatonit[4dbb,7fb82f5d8000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:45:59 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:59 managed-node2 systemd-coredump[31632]: Process 31627 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:59 managed-node2 systemd[1]: Started systemd-coredump@9-31632-0.service - Process Core Dump (PID 31632/UID 0). ░░ Subject: A start job for unit systemd-coredump@9-31632-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@9-31632-0.service has finished successfully. ░░ ░░ The job identifier is 2153. Mar 07 11:45:59 managed-node2 systemd[29271]: Started libpod-309bf1ae864fe28aa1049bfc71c43fecc7211dc1b56ee779eeec10eb7def34c6.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 128. 
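The restarted service brings aardvark-dns back up inside the user's rootless network namespace (the run-p31598 scope above, started with -p 53). That namespace can be joined for inspection; a sketch, assuming ss is installed and that this podman version supports podman unshare --rootless-netns:

    # Sketch: checking the rootless aardvark-dns listener from inside the rootless netns
    sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 \
        podman unshare --rootless-netns ss -ulnp 'sport = :53'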
Mar 07 11:45:59 managed-node2 podman[31542]: Pod: Mar 07 11:45:59 managed-node2 systemd[29271]: podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Mar 07 11:45:59 managed-node2 systemd[29271]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 91. Mar 07 11:45:59 managed-node2 podman[31542]: 02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a Mar 07 11:45:59 managed-node2 podman[31542]: Container: Mar 07 11:45:59 managed-node2 podman[31542]: 309bf1ae864fe28aa1049bfc71c43fecc7211dc1b56ee779eeec10eb7def34c6 Mar 07 11:45:59 managed-node2 systemd-coredump[31633]: Process 31627 (catatonit) of user 3001 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007fb82f5dbdbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 31627 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 31627 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:45:59 managed-node2 systemd[1]: systemd-coredump@9-31632-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@9-31632-0.service has successfully entered the 'dead' state. Mar 07 11:45:59 managed-node2 sudo[31529]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:59 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:45:59 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:45:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:45:59 managed-node2 kernel: catatonit[31700]: segfault at a9b80 ip 00007fea7c676dbb sp 00007ffef7d06ba0 error 4 in catatonit[4dbb,7fea7c673000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:45:59 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:45:59 managed-node2 systemd-coredump[31705]: Process 31700 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:45:59 managed-node2 systemd[1]: Started systemd-coredump@10-31705-0.service - Process Core Dump (PID 31705/UID 0). ░░ Subject: A start job for unit systemd-coredump@10-31705-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@10-31705-0.service has finished successfully. ░░ ░░ The job identifier is 2162. Mar 07 11:46:00 managed-node2 systemd-coredump[31708]: Process 31700 (catatonit) of user 3001 dumped core. 
Module /usr/libexec/catatonit/catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 31700: #0 0x00007fea7c676dbb __libc_setup_tls (/usr/libexec/catatonit/catatonit + 0x4dbb) #1 0x00007fea7c676a79 __libc_start_main_impl (/usr/libexec/catatonit/catatonit + 0x4a79) #2 0x00007fea7c6744e5 _start (/usr/libexec/catatonit/catatonit + 0x24e5) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 31700 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 31700 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:00 managed-node2 systemd[1]: systemd-coredump@10-31705-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@10-31705-0.service has successfully entered the 'dead' state. Mar 07 11:46:00 managed-node2 python3.12[31846]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Mar 07 11:46:00 managed-node2 python3.12[32002]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:46:01 managed-node2 python3.12[32159]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:02 managed-node2 python3.12[32315]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:02 managed-node2 python3.12[32470]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:03 managed-node2 podman[32648]: 2026-03-07 11:46:03.898635121 -0500 EST m=+0.392627575 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:04 managed-node2 python3.12[32839]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:46:04 managed-node2 python3.12[32994]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:05 managed-node2 python3.12[33149]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:46:05 managed-node2 python3.12[33274]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901964.9849694-14845-101719091169581/.source.yml _original_basename=.e466nazg follow=False checksum=3ff675c4424d0c6a65148416b04367244e5cae81 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:05 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.010232203 -0500 EST m=+0.016945230 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:46:06 managed-node2 systemd[1]: Created slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice - cgroup machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice. ░░ Subject: A start job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished successfully. ░░ ░░ The job identifier is 2171. 
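The root-scope httpd2 deployment begins with the systemd-escape call logged a little earlier; running it by hand shows how the template instance name is derived. The expected output below is an inference from the user-scope unit name pattern seen earlier in this log, not copied from the run:

    # Sketch: deriving the root-scope unit name for the httpd2 kube file (command taken verbatim from the log)
    systemd-escape --template podman-kube@.service \
        /etc/containers/ansible-kubernetes.d/httpd2.yml
    # expected to print something like:
    #   podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service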
Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.051819164 -0500 EST m=+0.058532100 container create ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.055915225 -0500 EST m=+0.062628141 pod create 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.081531085 -0500 EST m=+0.088244108 container create 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1039] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.05840266 -0500 EST m=+0.065115702 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:06 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:46:06 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:06 managed-node2 (udev-worker)[33449]: Network interface NamePolicy= disabled on kernel command line. Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1191] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1201] device (veth0): carrier: link connected Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1203] device (podman1): carrier: link connected Mar 07 11:46:06 managed-node2 (udev-worker)[33448]: Network interface NamePolicy= disabled on kernel command line. 
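The kernel and NetworkManager messages above show the podman1 bridge and its veth0 pair coming up as externally managed devices; their state can be confirmed from the host. A sketch, assuming iproute2 and nmcli are available on the managed node:

    # Sketch: confirming the podman1/veth0 devices referenced in the messages above
    ip -br link show podman1
    nmcli -f DEVICE,TYPE,STATE device status | grep -E 'podman1|veth0'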
Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1470] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1475] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1481] device (podman1): Activation: starting connection 'podman1' (7024fc22-fe75-4cea-afb7-75608193f035) Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1484] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1486] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1488] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1531] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2177. Mar 07 11:46:06 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2177. Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1973] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1976] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.1982] device (podman1): Activation: successful, device activated. Mar 07 11:46:06 managed-node2 systemd[1]: Started run-p33482-i33483.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p33482-i33483.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p33482-i33483.scope has finished successfully. ░░ ░░ The job identifier is 2256. Mar 07 11:46:06 managed-node2 aardvark-dns[33482]: starting aardvark on a child with pid 33489 Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Successfully parsed config Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v6 ip {} Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Mar 07 11:46:06 managed-node2 systemd[1]: Started libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope. 
░░ Subject: A start job for unit libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully. ░░ ░░ The job identifier is 2262. Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : terminal_ctrl_fd: 12 Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : winsz read side: 16, winsz write side: 17 Mar 07 11:46:06 managed-node2 systemd[1]: Started libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope - libcrun container. ░░ Subject: A start job for unit libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has finished successfully. ░░ ░░ The job identifier is 2269. Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : container PID: 33495 Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.278669784 -0500 EST m=+0.285382817 container init ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.281608185 -0500 EST m=+0.288321275 container start ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:06 managed-node2 kernel: catatonit[33495]: segfault at a9b80 ip 00007f4ad6a29dbb sp 00007ffcbefcdc50 error 4 in catatonit[4dbb,7f4ad6a26000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:06 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:06 managed-node2 systemd-coredump[33497]: Process 33495 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:06 managed-node2 systemd[1]: Started systemd-coredump@11-33497-0.service - Process Core Dump (PID 33497/UID 0). ░░ Subject: A start job for unit systemd-coredump@11-33497-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@11-33497-0.service has finished successfully. ░░ ░░ The job identifier is 2276. Mar 07 11:46:06 managed-node2 systemd[1]: Started libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope. ░░ Subject: A start job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully. ░░ ░░ The job identifier is 2285. 
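For the root-scope network, netavark programs the host's nftables table named "netavark" in the inet family (matching the rule dumps earlier in this log); it can be listed directly, assuming the nft tool is installed:

    # Sketch: listing the host nftables table netavark manages for the podman1 network
    nft list table inet netavark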
Mar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/11/attach} Mar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : terminal_ctrl_fd: 11 Mar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : winsz read side: 15, winsz write side: 16 Mar 07 11:46:06 managed-node2 systemd[1]: Started libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope - libcrun container. ░░ Subject: A start job for unit libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished successfully. ░░ ░░ The job identifier is 2292. Mar 07 11:46:06 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : container PID: 33503 Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.352403787 -0500 EST m=+0.359116948 container init 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.357465195 -0500 EST m=+0.364178282 container start 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Mar 07 11:46:06 managed-node2 podman[33436]: 2026-03-07 11:46:06.361993922 -0500 EST m=+0.368706973 pod start 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Mar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 Container: 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 Mar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2026-03-07T11:46:06-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2026-03-07T11:46:06-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2026-03-07T11:46:06-05:00" level=info msg="Setting parallel job count to 7" time="2026-03-07T11:46:06-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2026-03-07T11:46:06-05:00" level=info msg="Using sqlite as database backend" time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph driver overlay" time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Using run root /run/containers/storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Using static dir 
/var/lib/containers/storage/libpod" time="2026-03-07T11:46:06-05:00" level=debug msg="Using tmp dir /run/libpod" time="2026-03-07T11:46:06-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2026-03-07T11:46:06-05:00" level=debug msg="Using transient store: false" time="2026-03-07T11:46:06-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2026-03-07T11:46:06-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2026-03-07T11:46:06-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2026-03-07T11:46:06-05:00" level=debug msg="Initializing event backend journald" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2026-03-07T11:46:06-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d bridge podman1 2026-03-07 11:44:18.828483768 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded 2 networks" time="2026-03-07T11:46:06-05:00" level=debug msg="Pod using bridge network mode" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent 
machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string" time="2026-03-07T11:46:06-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:46:06-05:00" level=debug msg="setting container name 09b7f33e3afd-infra" time="2026-03-07T11:46:06-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Allocated lock 1 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container \"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\" has work directory \"/var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b\" has run directory \"/run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:46:06-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2026-03-07T11:46:06-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2026-03-07T11:46:06-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:46:06-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2026-03-07T11:46:06-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2026-03-07T11:46:06-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2026-03-07T11:46:06-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2026-03-07T11:46:06-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2026-03-07T11:46:06-05:00" level=debug msg="using systemd mode: false" time="2026-03-07T11:46:06-05:00" level=debug msg="adding container to pod httpd2" time="2026-03-07T11:46:06-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2026-03-07T11:46:06-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2026-03-07T11:46:06-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /proc" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /dev" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /dev/pts" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /dev/mqueue" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /sys" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2026-03-07T11:46:06-05:00" level=debug msg="Allocated lock 2 for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\" has work directory \"/var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\" has run directory \"/run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Strongconnecting node ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="Pushed ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b onto stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Finishing node ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b. Popped ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b off stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Strongconnecting node 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="Pushed 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 onto stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Finishing node 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08. Popped 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 off stack" time="2026-03-07T11:46:06-05:00" level=debug msg="Made network namespace at /run/netns/netns-57a5144e-40ac-4a85-01ac-9226ddb3e6f8 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="Created root filesystem for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b at /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/rootfs/merge" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::bridge] Using mtu 9001 from default route interface for the network [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/ip_forward to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/route_localnet to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::sysctl] Setting sysctl value for net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink_route] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_52ab27bf_10_89_0_0_nm24 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "ee:98:79:da:ba:e0", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2026-03-07T11:46:06-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:46:06-05:00" level=debug msg="Setting Cgroups for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b to machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice:libpod:ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:46:06-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/rootfs/merge\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Created OCI spec for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b at /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/config.json" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="Got pod cgroup as 
machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:46:06-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b -u ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata -p /run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/pidfile -n 09b7f33e3afd-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b]" time="2026-03-07T11:46:06-05:00" level=info msg="Running conmon under slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice and unitName libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope" time="2026-03-07T11:46:06-05:00" level=debug msg="Received: 33495" time="2026-03-07T11:46:06-05:00" level=info msg="Got Conmon PID as 33493" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b in OCI runtime" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2026-03-07T11:46:06-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2026-03-07T11:46:06-05:00" level=debug msg="Starting container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b with command [/catatonit -P]" time="2026-03-07T11:46:06-05:00" level=debug msg="Started container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" time="2026-03-07T11:46:06-05:00" level=debug msg="overlay: 
mount_data=lowerdir=/var/lib/containers/storage/overlay/l/MVNQJ7CO6BHDSUCUPVG3N5YCHU,upperdir=/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/diff,workdir=/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c91,c172\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Mounted container \"9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08\" at \"/var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/merged\"" time="2026-03-07T11:46:06-05:00" level=debug msg="Created root filesystem for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 at /var/lib/containers/storage/overlay/b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054/merged" time="2026-03-07T11:46:06-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2026-03-07T11:46:06-05:00" level=debug msg="Setting Cgroups for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 to machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice:libpod:9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2026-03-07T11:46:06-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2026-03-07T11:46:06-05:00" level=debug msg="Created OCI spec for container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 at /var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/config.json" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice for parent machine.slice and name libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58" time="2026-03-07T11:46:06-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice" time="2026-03-07T11:46:06-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2026-03-07T11:46:06-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 -u 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata -p /run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg 
--runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08]" time="2026-03-07T11:46:06-05:00" level=info msg="Running conmon under slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice and unitName libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope" time="2026-03-07T11:46:06-05:00" level=debug msg="Received: 33503" time="2026-03-07T11:46:06-05:00" level=info msg="Got Conmon PID as 33500" time="2026-03-07T11:46:06-05:00" level=debug msg="Created container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 in OCI runtime" time="2026-03-07T11:46:06-05:00" level=debug msg="Starting container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 with command [/bin/busybox-extras httpd -f -p 80]" time="2026-03-07T11:46:06-05:00" level=debug msg="Started container 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08" time="2026-03-07T11:46:06-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2026-03-07T11:46:06-05:00" level=debug msg="Shutting down engines" time="2026-03-07T11:46:06-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=33436 Mar 07 11:46:06 managed-node2 python3.12[33429]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Mar 07 11:46:06 managed-node2 systemd-coredump[33499]: Process 33495 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f4ad6a29dbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 33495 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 33495 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:06 managed-node2 systemd[1]: libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has successfully entered the 'dead' state. 
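The debug output above does not reproduce /etc/containers/ansible-kubernetes.d/httpd2.yml itself, but the details it records (pod httpd2, container httpd2 from quay.io/libpod/testimage:20210610, working directory /var/www, start command /bin/busybox-extras httpd -f -p 80) point to a manifest roughly shaped like the sketch below. Everything not visible in the log (ports, volumes, annotations) is omitted, and the command could equally come from the image's CMD rather than from the manifest.

  # Approximate shape of httpd2.yml, reconstructed only from the debug log above (a sketch, not the test's actual file)
  apiVersion: v1
  kind: Pod
  metadata:
    name: httpd2
  spec:
    containers:
      - name: httpd2
        image: quay.io/libpod/testimage:20210610
        command: ["/bin/busybox-extras", "httpd", "-f", "-p", "80"]
        workingDir: /var/www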
Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : container 33495 exited with status 139 Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice/libpod-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope/container/memory.events Mar 07 11:46:06 managed-node2 conmon[33493]: conmon ab7fab317dc05955f6c3 : Cgroup appears to have been removed, stopping OOM monitoring Mar 07 11:46:06 managed-node2 systemd[1]: systemd-coredump@11-33497-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@11-33497-0.service has successfully entered the 'dead' state. Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b)" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="Setting parallel job count to 7" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="Using sqlite as database backend" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph driver overlay" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using run root /run/containers/storage" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using tmp dir /run/libpod" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using transient store: false" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cached 
value indicated that overlay is supported" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that metacopy is being used" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Initializing event backend journald" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Mar 07 11:46:06 managed-node2 podman[33506]: 2026-03-07 11:46:06.442858751 -0500 EST m=+0.031283817 container died ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra) Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Cleaning up container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: 
time="2026-03-07T11:46:06-05:00" level=debug msg="Tearing down network namespace at /run/netns/netns-57a5144e-40ac-4a85-01ac-9226ddb3e6f8 for container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d bridge podman1 2026-03-07 11:44:18.828483768 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully loaded 2 networks" Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Received SIGHUP Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Successfully parsed config Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v4 ip {} Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: Listen v6 ip {} Mar 07 11:46:06 managed-node2 aardvark-dns[33489]: No configuration found stopping the sever Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Tearing down..\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [INFO netavark::firewall] Using nftables firewall driver\n" Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:06 managed-node2 systemd[1]: run-p33482-i33483.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p33482-i33483.scope has successfully entered the 'dead' state. 
Mar 07 11:46:06 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:46:06 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:46:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [INFO netavark::network::bridge] removing bridge podman1\n" Mar 07 11:46:06 managed-node2 NetworkManager[807]: [1772901966.4783] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"INPUT\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\"tcp\")), Element(String(\"udp\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"th\", field: \"dport\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"daddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \"state\", family: None, dir: None })), right: List([String(\"established\"), String(\"related\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"POSTROUTING\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \"nv_52ab27bf_10_89_0_0_nm24\" })], handle: Some(26), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 4 rules\n[DEBUG netavark::firewall::nft] Found chain nv_52ab27bf_10_89_0_0_nm24\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::firewall::firewalld] Removing firewalld rules for IPs 10.89.0.0/24\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"NETAVARK-ISOLATION-3\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\"podman1\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain 
nv_52ab27bf_10_89_0_0_nm24_dnat\n[DEBUG netavark::firewall::nft] Found chain nv_52ab27bf_10_89_0_0_nm24_dnat\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Teardown complete\n" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Successfully cleaned up container ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b" Mar 07 11:46:06 managed-node2 podman[33506]: 2026-03-07 11:46:06.54231054 -0500 EST m=+0.130735647 container cleanup ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b)" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=debug msg="Shutting down engines" Mar 07 11:46:06 managed-node2 /usr/bin/podman[33506]: time="2026-03-07T11:46:06-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=33506 Mar 07 11:46:06 managed-node2 systemd[1]: libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b.scope has successfully entered the 'dead' state. Mar 07 11:46:06 managed-node2 python3.12[33684]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Mar 07 11:46:06 managed-node2 systemd[1]: Reload requested from client PID 33685 ('systemctl') (unit session-7.scope)... Mar 07 11:46:06 managed-node2 systemd[1]: Reloading... Mar 07 11:46:07 managed-node2 systemd-rc-local-generator[33727]: /etc/rc.d/rc.local is not marked executable, skipping. Mar 07 11:46:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-rootfs-merge.mount has successfully entered the 'dead' state. Mar 07 11:46:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-userdata-shm.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b-userdata-shm.mount has successfully entered the 'dead' state. Mar 07 11:46:07 managed-node2 systemd[1]: run-netns-netns\x2d57a5144e\x2d40ac\x2d4a85\x2d01ac\x2d9226ddb3e6f8.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d57a5144e\x2d40ac\x2d4a85\x2d01ac\x2d9226ddb3e6f8.mount has successfully entered the 'dead' state. Mar 07 11:46:07 managed-node2 systemd[1]: Reloading finished in 222 ms. Mar 07 11:46:07 managed-node2 python3.12[33906]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Mar 07 11:46:07 managed-node2 systemd[1]: Reload requested from client PID 33909 ('systemctl') (unit session-7.scope)... Mar 07 11:46:07 managed-node2 systemd[1]: Reloading... Mar 07 11:46:07 managed-node2 systemd-rc-local-generator[33960]: /etc/rc.d/rc.local is not marked executable, skipping. Mar 07 11:46:07 managed-node2 systemd[1]: Reloading finished in 214 ms. Mar 07 11:46:08 managed-node2 python3.12[34130]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Mar 07 11:46:08 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2377. Mar 07 11:46:08 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2299. Mar 07 11:46:08 managed-node2 podman[34134]: 2026-03-07 11:46:08.575544958 -0500 EST m=+0.022390462 pod stop 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:10 managed-node2 podman[31698]: time="2026-03-07T11:46:10-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:10 managed-node2 systemd[29271]: Removed slice user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice - cgroup user-libpod_pod_02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 133 and the job result is done. 
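The two ansible-systemd invocations above, enabled=True followed by state=started against podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, correspond to tasks along the lines of the following sketch. Only the module parameters are taken from the log; the task names and the literal unit name (which the role presumably builds from variables) are illustrative assumptions.

  # Rough equivalent of the two ansible-systemd calls logged above (a sketch, not the role's actual task file)
  - name: Enable the podman-kube unit for httpd2.yml
    ansible.builtin.systemd:
      name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
      scope: system
      enabled: true

  - name: Start the podman-kube unit for httpd2.yml
    ansible.builtin.systemd:
      name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
      scope: system
      state: started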
Mar 07 11:46:10 managed-node2 podman[31698]: Pods stopped: Mar 07 11:46:10 managed-node2 podman[31698]: 02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a Mar 07 11:46:10 managed-node2 podman[31698]: Pods removed: Mar 07 11:46:10 managed-node2 podman[31698]: 02f11f9afe1ee04f80235fd5ebaa8d7c14a419d9fd64311a73e19e61d207ff7a Mar 07 11:46:10 managed-node2 podman[31698]: Secrets removed: Mar 07 11:46:10 managed-node2 podman[31698]: Volumes removed: Mar 07 11:46:10 managed-node2 systemd[29271]: podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service: Consumed 753ms CPU time, 80.9M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Mar 07 11:46:16 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Mar 07 11:46:18 managed-node2 podman[34134]: time="2026-03-07T11:46:18-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:18 managed-node2 conmon[33500]: conmon 9f1ff57323f385e2fc23 : container 33503 exited with status 137 Mar 07 11:46:18 managed-node2 systemd[1]: libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has successfully entered the 'dead' state. 
Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.605105738 -0500 EST m=+10.051951370 container died 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08)" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Setting parallel job count to 7" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Using sqlite as database backend" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using graph driver overlay" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using run root /run/containers/storage" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using tmp dir /run/libpod" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using transient store: false" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Cached value indicated that overlay is supported" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Cached value indicated that metacopy is being used" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Not using native 
diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Initializing event backend journald" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Mar 07 11:46:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-b5968753590b66a1c26e99cceae0a7a09fa402941da9e3e7750147bb33180054-merged.mount has successfully entered the 'dead' state. 
Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.639159774 -0500 EST m=+10.086005199 container cleanup 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Received shutdown signal \"terminated\", terminating!" PID=34157 Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=info msg="Invoking shutdown handler \"libpod\"" PID=34157 Mar 07 11:46:18 managed-node2 systemd[1]: Stopping libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope... ░░ Subject: A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has begun execution. ░░ ░░ The job identifier is 2385. Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --hooks-dir /usr/share/containers/oci/hooks.d --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08)" Mar 07 11:46:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Mar 07 11:46:18 managed-node2 /usr/bin/podman[34157]: time="2026-03-07T11:46:18-05:00" level=debug msg="Completed shutdown handler \"libpod\", duration 0s" PID=34157 Mar 07 11:46:18 managed-node2 systemd[1]: libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has successfully entered the 'dead' state. Mar 07 11:46:18 managed-node2 systemd[1]: Stopped libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope. ░░ Subject: A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08.scope has finished. ░░ ░░ The job identifier is 2385 and the job result is done. Mar 07 11:46:18 managed-node2 systemd[1]: Removed slice machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice - cgroup machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice. ░░ Subject: A stop job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58.slice has finished. ░░ ░░ The job identifier is 2384 and the job result is done. Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.687385492 -0500 EST m=+10.134230912 pod stop 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.7198332 -0500 EST m=+10.166678633 container remove 9f1ff57323f385e2fc23dfb7788a74daf6b5808447eb93a86c5945d619bd2b08 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.74111814 -0500 EST m=+10.187963572 container remove ab7fab317dc05955f6c38ecca2caeef57bc6a619d51e7cf243e49fb09889e73b (image=, name=09b7f33e3afd-infra, pod_id=09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.748131526 -0500 EST m=+10.194976927 pod remove 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 (image=, name=httpd2) Mar 07 11:46:18 managed-node2 podman[34134]: Pods stopped: Mar 07 11:46:18 managed-node2 podman[34134]: 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 Mar 07 11:46:18 managed-node2 podman[34134]: Pods removed: Mar 07 11:46:18 managed-node2 podman[34134]: 09b7f33e3afd30e7750bfd257cbee46c9f2c17f99792753499dbeb34c5efdc58 Mar 07 11:46:18 managed-node2 podman[34134]: Secrets removed: Mar 07 11:46:18 managed-node2 podman[34134]: Volumes removed: Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.7485712 -0500 EST m=+10.195416609 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, 
type=bridge) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.765571666 -0500 EST m=+10.212417099 container create f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice - cgroup machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice. ░░ Subject: A start job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished successfully. ░░ ░░ The job identifier is 2388. Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.805707536 -0500 EST m=+10.252552936 container create 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.809765847 -0500 EST m=+10.256611247 pod create 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.811528719 -0500 EST m=+10.258374297 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.833756495 -0500 EST m=+10.280601983 container create 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.834103154 -0500 EST m=+10.280948599 container restart f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 systemd[1]: Started libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope - libcrun container. ░░ Subject: A start job for unit libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope has finished successfully. ░░ ░░ The job identifier is 2394. 
Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.897730576 -0500 EST m=+10.344576055 container init f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 podman[34134]: 2026-03-07 11:46:18.900084574 -0500 EST m=+10.346930106 container start f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:18 managed-node2 kernel: catatonit[34171]: segfault at a9b80 ip 00007ff9206acdbb sp 00007ffc502fe200 error 4 in catatonit[4dbb,7ff9206a9000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:46:18 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:18 managed-node2 systemd-coredump[34173]: Process 34171 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:18 managed-node2 systemd[1]: Started systemd-coredump@12-34173-0.service - Process Core Dump (PID 34173/UID 0). ░░ Subject: A start job for unit systemd-coredump@12-34173-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@12-34173-0.service has finished successfully. ░░ ░░ The job identifier is 2400. Mar 07 11:46:18 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:46:18 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9395] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:18 managed-node2 (udev-worker)[34175]: Network interface NamePolicy= disabled on kernel command line. Mar 07 11:46:18 managed-node2 (udev-worker)[34176]: Network interface NamePolicy= disabled on kernel command line. 
Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9459] device (podman1): carrier: link connected Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9484] device (veth0): carrier: link connected Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9487] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9699] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9734] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9762] device (podman1): Activation: starting connection 'podman1' (a476fb3c-4953-4a99-8c37-c91c928220c1) Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9763] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9766] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9767] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Mar 07 11:46:18 managed-node2 NetworkManager[807]: [1772901978.9770] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Mar 07 11:46:18 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2409. Mar 07 11:46:19 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2409. Mar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.0167] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Mar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.0175] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Mar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.0184] device (podman1): Activation: successful, device activated. Mar 07 11:46:19 managed-node2 systemd[1]: Started run-p34216-i34217.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p34216-i34217.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p34216-i34217.scope has finished successfully. ░░ ░░ The job identifier is 2488. Mar 07 11:46:19 managed-node2 systemd-coredump[34177]: Process 34171 (catatonit) of user 0 dumped core. 
Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007ff9206acdbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 34171 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 34171 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:19 managed-node2 systemd[1]: libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 conmon[34169]: conmon f021e1ac269371a1a5c6 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a.scope/container/memory.events Mar 07 11:46:19 managed-node2 systemd[1]: systemd-coredump@12-34173-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@12-34173-0.service has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 podman[34226]: 2026-03-07 11:46:19.118293703 -0500 EST m=+0.022854670 container died f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 systemd[1]: Started libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope - libcrun container. ░░ Subject: A start job for unit libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope has finished successfully. ░░ ░░ The job identifier is 2494. 
Mar 07 11:46:19 managed-node2 podman[34226]: 2026-03-07 11:46:19.144694335 -0500 EST m=+0.049255264 container cleanup f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.149467711 -0500 EST m=+10.596313208 container init 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.1518268 -0500 EST m=+10.598672204 container start 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 kernel: catatonit[34227]: segfault at a9b80 ip 00007f45bdca9dbb sp 00007fff72636820 error 4 in catatonit[4dbb,7f45bdca6000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:19 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:19 managed-node2 systemd-coredump[34240]: Process 34227 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:19 managed-node2 systemd[1]: Started systemd-coredump@13-34240-0.service - Process Core Dump (PID 34240/UID 0). ░░ Subject: A start job for unit systemd-coredump@13-34240-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@13-34240-0.service has finished successfully. ░░ ░░ The job identifier is 2501. Mar 07 11:46:19 managed-node2 systemd[1]: Started libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope - libcrun container. ░░ Subject: A start job for unit libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope has finished successfully. ░░ ░░ The job identifier is 2510. 
Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.197461095 -0500 EST m=+10.644306639 container init 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.200754871 -0500 EST m=+10.647600358 container start 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test) Mar 07 11:46:19 managed-node2 podman[34134]: 2026-03-07 11:46:19.205111382 -0500 EST m=+10.651956899 pod start 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2) Mar 07 11:46:19 managed-node2 podman[34134]: Pod: Mar 07 11:46:19 managed-node2 podman[34134]: 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 Mar 07 11:46:19 managed-node2 podman[34134]: Container: Mar 07 11:46:19 managed-node2 podman[34134]: 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 Mar 07 11:46:19 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Mar 07 11:46:19 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2299. Mar 07 11:46:19 managed-node2 podman[34248]: 2026-03-07 11:46:19.270404055 -0500 EST m=+0.035233803 pod stop 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2) Mar 07 11:46:19 managed-node2 systemd-coredump[34241]: Process 34227 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f45bdca9dbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 34227 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 34227 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:19 managed-node2 systemd[1]: libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d.scope has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 systemd[1]: systemd-coredump@13-34240-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@13-34240-0.service has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 podman[34283]: 2026-03-07 11:46:19.327174521 -0500 EST m=+0.016512016 container died 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 systemd[1]: run-p34216-i34217.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p34216-i34217.scope has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:19 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:46:19 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:46:19 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:19 managed-node2 NetworkManager[807]: [1772901979.3664] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Mar 07 11:46:19 managed-node2 podman[34283]: 2026-03-07 11:46:19.426367765 -0500 EST m=+0.115705152 container cleanup 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-rootfs-merge.mount has successfully entered the 'dead' state. Mar 07 11:46:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a-userdata-shm.mount has successfully entered the 'dead' state. 
Mar 07 11:46:20 managed-node2 python3.12[34439]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:46:21 managed-node2 python3.12[34596]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:21 managed-node2 python3.12[34752]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:22 managed-node2 python3.12[34907]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:23 managed-node2 podman[35085]: 2026-03-07 11:46:23.121556991 -0500 EST m=+0.299999620 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:23 managed-node2 python3.12[35275]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Mar 07 11:46:24 managed-node2 python3.12[35430]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:24 managed-node2 python3.12[35585]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Mar 07 11:46:24 managed-node2 python3.12[35710]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1772901984.2287433-15536-81376403194330/.source.yml _original_basename=.8zbijrin follow=False checksum=4ea4a304b347a6aaa397596e57cb6db94ea16b46 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:25 managed-node2 python3.12[35865]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.275495475 -0500 EST m=+0.013172190 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:46:25 managed-node2 systemd[1]: Created slice machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice - cgroup machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice. ░░ Subject: A start job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished successfully. ░░ ░░ The job identifier is 2517. Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.311817507 -0500 EST m=+0.049494315 container create 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.316459547 -0500 EST m=+0.054136253 pod create 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.342690191 -0500 EST m=+0.080367000 container create fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3550] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/7) Mar 07 11:46:25 managed-node2 (udev-worker)[35883]: Network interface NamePolicy= disabled on kernel command line. Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.318601722 -0500 EST m=+0.056278508 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:25 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:46:25 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3822] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:25 managed-node2 (udev-worker)[35885]: Network interface NamePolicy= disabled on kernel command line. 
Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3879] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3889] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3899] device (podman1): Activation: starting connection 'podman1' (6d8bf7d8-ae00-4628-b267-3ec15c9e992b) Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3904] device (veth0): carrier: link connected Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3907] device (podman1): carrier: link connected Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3909] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3915] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3918] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3922] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3944] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3948] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.3952] device (podman1): Activation: successful, device activated. Mar 07 11:46:25 managed-node2 systemd[1]: Started run-p35920-i35921.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p35920-i35921.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35920-i35921.scope has finished successfully. ░░ ░░ The job identifier is 2523. Mar 07 11:46:25 managed-node2 systemd[1]: Started libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope. ░░ Subject: A start job for unit libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully. ░░ ░░ The job identifier is 2529. Mar 07 11:46:25 managed-node2 systemd[1]: Started libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope - libcrun container. ░░ Subject: A start job for unit libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has finished successfully. ░░ ░░ The job identifier is 2536. 
Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.521896266 -0500 EST m=+0.259573093 container init 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.524546913 -0500 EST m=+0.262223671 container start 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:25 managed-node2 kernel: catatonit[35928]: segfault at a9b80 ip 00007f6f72c16dbb sp 00007ffefa9d1c40 error 4 in catatonit[4dbb,7f6f72c13000+77000] likely on CPU 0 (core 0, socket 0) Mar 07 11:46:25 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:25 managed-node2 systemd-coredump[35930]: Process 35928 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:25 managed-node2 systemd[1]: Started systemd-coredump@14-35930-0.service - Process Core Dump (PID 35930/UID 0). ░░ Subject: A start job for unit systemd-coredump@14-35930-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@14-35930-0.service has finished successfully. ░░ ░░ The job identifier is 2543. Mar 07 11:46:25 managed-node2 systemd[1]: Started libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope. ░░ Subject: A start job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully. ░░ ░░ The job identifier is 2552. Mar 07 11:46:25 managed-node2 systemd[1]: Started libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope - libcrun container. ░░ Subject: A start job for unit libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished successfully. ░░ ░░ The job identifier is 2559. 
Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.585157387 -0500 EST m=+0.322834314 container init fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.589721993 -0500 EST m=+0.327398858 container start fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:25 managed-node2 podman[35872]: 2026-03-07 11:46:25.594062087 -0500 EST m=+0.331738925 pod start 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3) Mar 07 11:46:25 managed-node2 systemd-coredump[35931]: Process 35928 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f6f72c16dbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 35928 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 35928 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:25 managed-node2 systemd[1]: libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has successfully entered the 'dead' state. Mar 07 11:46:25 managed-node2 systemd[1]: systemd-coredump@14-35930-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@14-35930-0.service has successfully entered the 'dead' state. Mar 07 11:46:25 managed-node2 podman[35941]: 2026-03-07 11:46:25.677668355 -0500 EST m=+0.029940803 container died 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra) Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:25 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:46:25 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:46:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:25 managed-node2 systemd[1]: run-p35920-i35921.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35920-i35921.scope has successfully entered the 'dead' state. Mar 07 11:46:25 managed-node2 NetworkManager[807]: [1772901985.7169] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Mar 07 11:46:25 managed-node2 systemd[1]: run-netns-netns\x2d642376bf\x2d1b5a\x2dbb61\x2d6a3b\x2dfe70bdea2608.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d642376bf\x2d1b5a\x2dbb61\x2d6a3b\x2dfe70bdea2608.mount has successfully entered the 'dead' state. Mar 07 11:46:25 managed-node2 podman[35941]: 2026-03-07 11:46:25.791383924 -0500 EST m=+0.143656388 container cleanup 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:25 managed-node2 systemd[1]: libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1.scope has successfully entered the 'dead' state. Mar 07 11:46:26 managed-node2 python3.12[36118]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Mar 07 11:46:26 managed-node2 systemd[1]: Reload requested from client PID 36119 ('systemctl') (unit session-7.scope)... Mar 07 11:46:26 managed-node2 systemd[1]: Reloading... Mar 07 11:46:26 managed-node2 systemd-rc-local-generator[36161]: /etc/rc.d/rc.local is not marked executable, skipping. Mar 07 11:46:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-userdata-shm.mount has successfully entered the 'dead' state. Mar 07 11:46:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1-rootfs-merge.mount has successfully entered the 'dead' state. Mar 07 11:46:26 managed-node2 systemd[1]: Reloading finished in 222 ms. Mar 07 11:46:27 managed-node2 python3.12[36340]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Mar 07 11:46:27 managed-node2 systemd[1]: Reload requested from client PID 36343 ('systemctl') (unit session-7.scope)... Mar 07 11:46:27 managed-node2 systemd[1]: Reloading... Mar 07 11:46:27 managed-node2 systemd-rc-local-generator[36385]: /etc/rc.d/rc.local is not marked executable, skipping. Mar 07 11:46:27 managed-node2 systemd[1]: Reloading finished in 215 ms. Mar 07 11:46:27 managed-node2 python3.12[36564]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Mar 07 11:46:27 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... 
░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2566. Mar 07 11:46:27 managed-node2 podman[36568]: 2026-03-07 11:46:27.898059361 -0500 EST m=+0.023361638 pod stop 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3) Mar 07 11:46:29 managed-node2 podman[34248]: time="2026-03-07T11:46:29-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:29 managed-node2 systemd[1]: libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope has successfully entered the 'dead' state. Mar 07 11:46:29 managed-node2 conmon[34243]: conmon 1d68ffb6e0e9d84fa87d : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice/libpod-1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75.scope/container/memory.events Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.301508379 -0500 EST m=+10.066338217 container died 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:29 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0c3bc18cb09098ff9f2d69e62d54abe819e567e28ca84575505d06b7f7092c88-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-0c3bc18cb09098ff9f2d69e62d54abe819e567e28ca84575505d06b7f7092c88-merged.mount has successfully entered the 'dead' state. Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.336665522 -0500 EST m=+10.101495209 container cleanup 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:29 managed-node2 systemd[1]: Removed slice machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice - cgroup machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice. ░░ Subject: A stop job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409.slice has finished. 
░░ ░░ The job identifier is 2651 and the job result is done. Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.366179073 -0500 EST m=+10.131008702 container remove 1d68ffb6e0e9d84fa87db4b0176b299cecc98b12c73df1d9c94d51d99ee5dc75 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.387294034 -0500 EST m=+10.152123658 container remove 44039aa5e1ebf4d937070f7965c00aa25e3923aa3a9bbeda80748c5a7c308d6d (image=, name=51116cf045b6-infra, pod_id=51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.394892423 -0500 EST m=+10.159722015 pod remove 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 (image=, name=httpd2) Mar 07 11:46:29 managed-node2 podman[34248]: 2026-03-07 11:46:29.413822806 -0500 EST m=+10.178652430 container remove f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a (image=, name=5fde841b1f32-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Mar 07 11:46:29 managed-node2 podman[34248]: time="2026-03-07T11:46:29-05:00" level=error msg="Checking whether service of container f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a can be stopped: no container with ID f021e1ac269371a1a5c6a10cbace1a1c1e80d53b6c53acc2349ec2654da1cd2a found in database: no such container" Mar 07 11:46:29 managed-node2 podman[34248]: Pods stopped: Mar 07 11:46:29 managed-node2 podman[34248]: 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 Mar 07 11:46:29 managed-node2 podman[34248]: Pods removed: Mar 07 11:46:29 managed-node2 podman[34248]: 51116cf045b674683843afa03bd0a46575f8c7be78f3b86c505cd1ac152df409 Mar 07 11:46:29 managed-node2 podman[34248]: Secrets removed: Mar 07 11:46:29 managed-node2 podman[34248]: Volumes removed: Mar 07 11:46:29 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state. Mar 07 11:46:35 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2653. Mar 07 11:46:35 managed-node2 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Mar 07 11:46:35 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files. 
░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2653. Mar 07 11:46:35 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 podman[36568]: time="2026-03-07T11:46:37-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:37 managed-node2 systemd[1]: libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 podman[36568]: 2026-03-07 11:46:37.926712157 -0500 EST m=+10.052014514 container died fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:46:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ee6d4c1dab7f57321763e3e557cd90f2d6b0b9b7aeaf3ef8eab8ca49efa608d6-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-ee6d4c1dab7f57321763e3e557cd90f2d6b0b9b7aeaf3ef8eab8ca49efa608d6-merged.mount has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 podman[36568]: 2026-03-07 11:46:37.960570271 -0500 EST m=+10.085872435 container cleanup fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Mar 07 11:46:37 managed-node2 systemd[1]: Stopping libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope... ░░ Subject: A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has begun execution. ░░ ░░ The job identifier is 2732. Mar 07 11:46:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 systemd[1]: libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has successfully entered the 'dead' state. Mar 07 11:46:37 managed-node2 systemd[1]: Stopped libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope. ░░ Subject: A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7.scope has finished. ░░ ░░ The job identifier is 2732 and the job result is done. Mar 07 11:46:38 managed-node2 systemd[1]: Removed slice machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice - cgroup machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice. ░░ Subject: A stop job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d.slice has finished. ░░ ░░ The job identifier is 2731 and the job result is done. Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.035367566 -0500 EST m=+10.160669765 container remove fc48a3b7d96d06e5ce7faa5e2de98761f1e693df53be1a1d4d23f631231ca4f7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.056136818 -0500 EST m=+10.181439022 container remove 76a48636e9a25f290e4fd5f9ffeb7cbe3d8546bc303a2f553d855fc320ba95d1 (image=, name=1d147d13572f-infra, pod_id=1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.063189022 -0500 EST m=+10.188491194 pod remove 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d (image=, name=httpd3) Mar 07 11:46:38 managed-node2 podman[36568]: Pods stopped: Mar 07 11:46:38 managed-node2 podman[36568]: 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d Mar 07 11:46:38 managed-node2 podman[36568]: Pods removed: Mar 07 11:46:38 managed-node2 podman[36568]: 1d147d13572f00a60c2f9b16d87b34afe23ab9e5114f7dd5525b58afa680701d Mar 07 11:46:38 managed-node2 podman[36568]: Secrets removed: Mar 07 11:46:38 managed-node2 podman[36568]: Volumes removed: Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.063738172 -0500 EST m=+10.189040347 network create 52ab27bfef1b2cd8ca8a90965203a8be62dc3a6112e122b8c0f2e1617f59128d (name=podman-default-kube-network, type=bridge) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.080611451 -0500 EST m=+10.205913648 container create 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 systemd[1]: Created slice machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice - cgroup 
machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice. ░░ Subject: A start job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished successfully. ░░ ░░ The job identifier is 2734. Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.119628454 -0500 EST m=+10.244930639 container create f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.12361059 -0500 EST m=+10.248912760 pod create 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.147717736 -0500 EST m=+10.273020009 container create e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.148072438 -0500 EST m=+10.273374646 container restart 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.125319335 -0500 EST m=+10.250621668 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Mar 07 11:46:38 managed-node2 systemd[1]: Started libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope - libcrun container. ░░ Subject: A start job for unit libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope has finished successfully. ░░ ░░ The job identifier is 2740. 
Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.187397166 -0500 EST m=+10.312699428 container init 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.189764724 -0500 EST m=+10.315067060 container start 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 kernel: catatonit[36613]: segfault at a9b80 ip 00007f2b0a99fdbb sp 00007ffeeae7b9e0 error 4 in catatonit[4dbb,7f2b0a99c000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:38 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:38 managed-node2 systemd-coredump[36615]: Process 36613 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:38 managed-node2 systemd[1]: Started systemd-coredump@15-36615-0.service - Process Core Dump (PID 36615/UID 0). ░░ Subject: A start job for unit systemd-coredump@15-36615-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@15-36615-0.service has finished successfully. ░░ ░░ The job identifier is 2746. Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:38 managed-node2 kernel: veth0: entered allmulticast mode Mar 07 11:46:38 managed-node2 kernel: veth0: entered promiscuous mode Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2247] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Mar 07 11:46:38 managed-node2 (udev-worker)[36618]: Network interface NamePolicy= disabled on kernel command line. Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2297] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2309] device (veth0): carrier: link connected Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2312] device (podman1): carrier: link connected Mar 07 11:46:38 managed-node2 (udev-worker)[36619]: Network interface NamePolicy= disabled on kernel command line. 
Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2590] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2595] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2603] device (podman1): Activation: starting connection 'podman1' (2d400bb2-5548-4489-9134-38d8ab37ffca) Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2605] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2608] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2624] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2626] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2755. Mar 07 11:46:38 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2755. Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2978] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2981] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.2986] device (podman1): Activation: successful, device activated. Mar 07 11:46:38 managed-node2 systemd[1]: Started run-p36659-i36660.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p36659-i36660.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p36659-i36660.scope has finished successfully. ░░ ░░ The job identifier is 2834. Mar 07 11:46:38 managed-node2 systemd-coredump[36617]: Process 36613 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007f2b0a99fdbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 36613 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 36613 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. 
Mar 07 11:46:38 managed-node2 systemd[1]: Started libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope - libcrun container. ░░ Subject: A start job for unit libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope has finished successfully. ░░ ░░ The job identifier is 2840. Mar 07 11:46:38 managed-node2 systemd[1]: libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64.scope has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 systemd[1]: systemd-coredump@15-36615-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@15-36615-0.service has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.385243424 -0500 EST m=+10.510545838 container init f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.389173233 -0500 EST m=+10.514475479 container start f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 kernel: catatonit[36666]: segfault at a9b80 ip 00007fbf485bcdbb sp 00007ffec6bbd630 error 4 in catatonit[4dbb,7fbf485b9000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:38 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:38 managed-node2 systemd-coredump[36678]: Process 36666 (catatonit) of user 0 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:38 managed-node2 systemd[1]: Started systemd-coredump@16-36678-0.service - Process Core Dump (PID 36678/UID 0). ░░ Subject: A start job for unit systemd-coredump@16-36678-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@16-36678-0.service has finished successfully. ░░ ░░ The job identifier is 2847. Mar 07 11:46:38 managed-node2 podman[36668]: 2026-03-07 11:46:38.417671038 -0500 EST m=+0.036061473 container died 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 systemd[1]: Started libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope - libcrun container. 
░░ Subject: A start job for unit libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope has finished successfully. ░░ ░░ The job identifier is 2856. Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.454475365 -0500 EST m=+10.579777839 container init e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.458255485 -0500 EST m=+10.583557732 container start e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36668]: 2026-03-07 11:46:38.460012504 -0500 EST m=+0.078402768 container cleanup 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 podman[36568]: 2026-03-07 11:46:38.467228715 -0500 EST m=+10.592531020 pod start 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3) Mar 07 11:46:38 managed-node2 podman[36568]: Pod: Mar 07 11:46:38 managed-node2 podman[36568]: 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a Mar 07 11:46:38 managed-node2 podman[36568]: Container: Mar 07 11:46:38 managed-node2 podman[36568]: e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda Mar 07 11:46:38 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Mar 07 11:46:38 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2566. Mar 07 11:46:38 managed-node2 systemd-coredump[36679]: Process 36666 (catatonit) of user 0 dumped core. Module /catatonit from rpm catatonit-0.2.1-3.el10.x86_64 Stack trace of thread 1: #0 0x00007fbf485bcdbb n/a (/catatonit + 0x4dbb) ELF object binary architecture: AMD x86-64 ░░ Subject: Process 36666 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 36666 (catatonit) crashed and dumped core. 
░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:38 managed-node2 systemd[1]: libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 conmon[36664]: conmon f4619e7c96f87ca49266 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice/libpod-f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55.scope/container/memory.events Mar 07 11:46:38 managed-node2 systemd[1]: systemd-coredump@16-36678-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@16-36678-0.service has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 podman[36689]: 2026-03-07 11:46:38.559767062 -0500 EST m=+0.051261217 pod stop 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3) Mar 07 11:46:38 managed-node2 podman[36704]: 2026-03-07 11:46:38.587832463 -0500 EST m=+0.021982125 container died f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 systemd[1]: run-p36659-i36660.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p36659-i36660.scope has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:38 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Mar 07 11:46:38 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Mar 07 11:46:38 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Mar 07 11:46:38 managed-node2 NetworkManager[807]: [1772901998.6196] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Mar 07 11:46:38 managed-node2 podman[36704]: 2026-03-07 11:46:38.676650677 -0500 EST m=+0.110800341 container cleanup f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-rootfs-merge.mount has successfully entered the 'dead' state. Mar 07 11:46:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-userdata-shm.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64-userdata-shm.mount has successfully entered the 'dead' state. Mar 07 11:46:39 managed-node2 sudo[36930]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bhiauhbpazjvrfebfoyyszwwljeltpxs ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1772901998.8167956-16021-135055247896982/AnsiballZ_command.py' Mar 07 11:46:39 managed-node2 sudo[36930]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Mar 07 11:46:39 managed-node2 python3.12[36933]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:39 managed-node2 kernel: catatonit[36946]: segfault at a9b80 ip 00007f0938c79dbb sp 00007ffd32edf6e0 error 4 in catatonit[4dbb,7f0938c76000+77000] likely on CPU 1 (core 0, socket 0) Mar 07 11:46:39 managed-node2 kernel: Code: 66 66 66 2e 0f 1f 84 00 00 00 00 00 0f 1f 00 48 83 c0 38 48 39 d0 73 4f 83 38 07 75 f2 4c 8b 60 30 bd 40 00 00 00 48 8b 50 10 <49> 03 17 48 8b 48 20 49 39 ec 4c 8b 68 28 48 89 54 24 08 49 0f 43 Mar 07 11:46:39 managed-node2 systemd-coredump[36953]: Process 36946 (catatonit) of user 3001 terminated abnormally with signal 11/SEGV, processing... Mar 07 11:46:39 managed-node2 systemd[1]: Started systemd-coredump@17-36953-0.service - Process Core Dump (PID 36953/UID 0). ░░ Subject: A start job for unit systemd-coredump@17-36953-0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-coredump@17-36953-0.service has finished successfully. ░░ ░░ The job identifier is 2863. Mar 07 11:46:39 managed-node2 systemd[29271]: Started podman-36940.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 136. Mar 07 11:46:39 managed-node2 systemd[29271]: Started podman-pause-7a6a691e.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 140. Mar 07 11:46:39 managed-node2 systemd-coredump[36955]: Resource limits disable core dumping for process 36946 (catatonit). Mar 07 11:46:39 managed-node2 systemd-coredump[36955]: Process 36946 (catatonit) of user 3001 terminated abnormally without generating a coredump. ░░ Subject: Process 36946 (catatonit) dumped core ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: man:core(5) ░░ ░░ Process 36946 (catatonit) crashed and dumped core. ░░ ░░ This usually indicates a programming error in the crashing program and ░░ should be reported to its vendor as a bug. Mar 07 11:46:39 managed-node2 systemd[1]: systemd-coredump@17-36953-0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-coredump@17-36953-0.service has successfully entered the 'dead' state. 
Mar 07 11:46:39 managed-node2 sudo[36930]: pam_unix(sudo:session): session closed for user podman_basic_user Mar 07 11:46:39 managed-node2 python3.12[37114]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:40 managed-node2 python3.12[37276]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:48 managed-node2 podman[36689]: time="2026-03-07T11:46:48-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Mar 07 11:46:48 managed-node2 systemd[1]: libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda.scope has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.588635338 -0500 EST m=+10.080129492 container died e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Mar 07 11:46:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay-8566ab6694c8721216d4c8d23c7b9a958cf4d626dd678590315232721a4623bc-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-8566ab6694c8721216d4c8d23c7b9a958cf4d626dd678590315232721a4623bc-merged.mount has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.624299419 -0500 EST m=+10.115793441 container cleanup e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Mar 07 11:46:48 managed-node2 systemd[1]: Removed slice machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice - cgroup machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice. ░░ Subject: A stop job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a.slice has finished. ░░ ░░ The job identifier is 2872 and the job result is done. Mar 07 11:46:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.658377824 -0500 EST m=+10.149871873 container remove e317e901b204de4da75d414d3ccf265055b9f6742869185bda594ef2552c7bda (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.679532751 -0500 EST m=+10.171026794 container remove f4619e7c96f87ca49266c1911ce2489ed525f0837d66536bf7968a6eeb593f55 (image=, name=139618a222b9-infra, pod_id=139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.686508191 -0500 EST m=+10.178002210 pod remove 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a (image=, name=httpd3) Mar 07 11:46:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:48 managed-node2 podman[36689]: 2026-03-07 11:46:48.714211879 -0500 EST m=+10.205706029 container remove 870ac0084975620f32807b509cdf5529e4234749f251a3bb957deb2155579e64 (image=, name=b3ce3ab91ba6-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Mar 07 11:46:48 managed-node2 podman[36689]: Pods stopped: Mar 07 11:46:48 managed-node2 podman[36689]: 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a Mar 07 11:46:48 managed-node2 podman[36689]: Pods removed: Mar 07 11:46:48 managed-node2 podman[36689]: 139618a222b959e5b16e5e73f911d5b49eed9771b77cd162715105a12613361a Mar 07 11:46:48 managed-node2 podman[36689]: Secrets removed: Mar 07 11:46:48 managed-node2 podman[36689]: Volumes removed: Mar 07 11:46:48 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state. 
Mar 07 11:46:49 managed-node2 python3.12[37452]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Mar 07 11:46:49 managed-node2 python3.12[37608]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Mar 07 11:46:49 managed-node2 python3.12[37763]: ansible-file Invoked with path=/tmp/lsr_od4netlk_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Mar 07 11:46:50 managed-node2 sshd-session[37789]: Accepted publickey for root from 10.31.12.168 port 57886 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Mar 07 11:46:50 managed-node2 systemd-logind[759]: New session 10 of user root. ░░ Subject: A new session 10 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 10 has been created for the user root. ░░ ░░ The leading process of the session is 37789. Mar 07 11:46:50 managed-node2 systemd[1]: Started session-10.scope - Session 10 of User root. ░░ Subject: A start job for unit session-10.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-10.scope has finished successfully. ░░ ░░ The job identifier is 2874. Mar 07 11:46:50 managed-node2 sshd-session[37789]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Mar 07 11:46:50 managed-node2 sshd-session[37792]: Received disconnect from 10.31.12.168 port 57886:11: disconnected by user Mar 07 11:46:50 managed-node2 sshd-session[37792]: Disconnected from user root 10.31.12.168 port 57886 Mar 07 11:46:50 managed-node2 sshd-session[37789]: pam_unix(sshd:session): session closed for user root Mar 07 11:46:50 managed-node2 systemd[1]: session-10.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-10.scope has successfully entered the 'dead' state. Mar 07 11:46:50 managed-node2 systemd-logind[759]: Session 10 logged out. Waiting for processes to exit. Mar 07 11:46:50 managed-node2 systemd-logind[759]: Removed session 10. ░░ Subject: Session 10 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 10 has been terminated. 
Mar 07 11:46:51 managed-node2 sshd-session[37873]: Accepted publickey for root from 10.31.12.168 port 57898 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Mar 07 11:46:51 managed-node2 systemd-logind[759]: New session 11 of user root. ░░ Subject: A new session 11 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 11 has been created for the user root. ░░ ░░ The leading process of the session is 37873. Mar 07 11:46:51 managed-node2 systemd[1]: Started session-11.scope - Session 11 of User root. ░░ Subject: A start job for unit session-11.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-11.scope has finished successfully. ░░ ░░ The job identifier is 2956. Mar 07 11:46:51 managed-node2 sshd-session[37873]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)