ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.13/site-packages/ansible
  ansible collection location = /tmp/collections-c17
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.13.1 (main, Dec 9 2024, 00:00:00) [GCC 14.2.1 20240912 (Red Hat 14.2.1-3)] (/usr/bin/python3.13)
  jinja version = 3.1.5
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_pod.yml ************************************************
2 plays in /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:5
Saturday 25 January 2025 11:35:58 -0500 (0:00:00.014) 0:00:00.014 ******
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-GFB/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Ensure that the role can manage quadlet pods] ****************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
Saturday 25 January 2025 11:35:58 -0500 (0:00:00.030) 0:00:00.045 ******
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
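For context: the variables this play passes to the role are hidden by no_log in the output below, but the per-container facts the role logs ("Set per-container variables" parts 0 through 6) identify the two quadlet units under test. A minimal sketch of the invocation, assuming the test feeds them through the role's podman_quadlet_specs variable; the exact layout of the real test file may differ:

  - name: Run the role - root
    ansible.builtin.include_role:
      name: fedora.linux_system_roles.podman
    vars:
      podman_quadlet_specs:
        # Rendered by the role to /etc/containers/systemd/quadlet-pod-pod.pod:
        #   [Pod]
        #   PodName=quadlet-pod
        - name: quadlet-pod-pod
          type: pod
          Pod:
            PodName: quadlet-pod
        # Rendered to /etc/containers/systemd/quadlet-pod-container.container:
        #   [Container]
        #   ContainerName=quadlet-pod-container
        #   Exec=/bin/busybox-extras httpd -f -p 80
        #   Image=quay.io/libpod/testimage:20210610
        #   Pod=quadlet-pod-pod.pod
        #   [Install]
        #   WantedBy=default.target
        - name: quadlet-pod-container
          type: container
          Container:
            ContainerName: quadlet-pod-container
            Exec: /bin/busybox-extras httpd -f -p 80
            Image: quay.io/libpod/testimage:20210610
            Pod: quadlet-pod-pod.pod
          Install:
            WantedBy: default.target

From the pod unit above, quadlet derives the service name quadlet-pod-pod-pod.service (the quadlet name plus a "-pod" suffix), which matches the __podman_service_name fact and the service started later in this run.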
ok: [managed-node3] TASK [Run the role - root] ***************************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:34 Saturday 25 January 2025 11:35:59 -0500 (0:00:01.278) 0:00:01.323 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 25 January 2025 11:35:59 -0500 (0:00:00.092) 0:00:01.416 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 25 January 2025 11:35:59 -0500 (0:00:00.039) 0:00:01.455 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 25 January 2025 11:35:59 -0500 (0:00:00.050) 0:00:01.505 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 25 January 2025 11:36:00 -0500 (0:00:00.527) 0:00:02.033 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 25 January 2025 11:36:00 -0500 (0:00:00.023) 0:00:02.056 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 25 January 2025 11:36:00 -0500 (0:00:00.389) 0:00:02.446 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 25 January 2025 11:36:00 -0500 (0:00:00.036) 0:00:02.482 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, 
"item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 25 January 2025 11:36:00 -0500 (0:00:00.064) 0:00:02.547 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 25 January 2025 11:36:01 -0500 (0:00:01.185) 0:00:03.732 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 25 January 2025 11:36:01 -0500 (0:00:00.042) 0:00:03.775 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 25 January 2025 11:36:01 -0500 (0:00:00.045) 0:00:03.820 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.042) 0:00:03.863 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.040) 0:00:03.904 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.041) 0:00:03.945 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.030258", "end": "2025-01-25 11:36:02.516717", "rc": 0, "start": "2025-01-25 11:36:02.486459" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.505) 0:00:04.451 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.033) 0:00:04.484 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.032) 0:00:04.516 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.046) 0:00:04.563 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.051) 0:00:04.614 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.067) 0:00:04.682 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.049) 0:00:04.731 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:36:02 -0500 (0:00:00.059) 0:00:04.791 ****** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super 
User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:36:03 -0500 (0:00:00.477) 0:00:05.268 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:36:03 -0500 (0:00:00.034) 0:00:05.303 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:36:03 -0500 (0:00:00.043) 0:00:05.346 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:36:03 -0500 (0:00:00.394) 0:00:05.741 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:36:03 -0500 (0:00:00.031) 0:00:05.772 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:36:03 -0500 (0:00:00.030) 0:00:05.803 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 
January 2025 11:36:03 -0500 (0:00:00.034) 0:00:05.837 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.030) 0:00:05.867 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.030) 0:00:05.898 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.029) 0:00:05.928 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.029) 0:00:05.958 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.029) 0:00:05.987 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.066) 0:00:06.054 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.056) 0:00:06.111 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.030) 0:00:06.141 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.029) 0:00:06.170 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.059) 0:00:06.230 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.029) 0:00:06.260 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.030) 0:00:06.290 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.055) 0:00:06.346 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.031) 0:00:06.378 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.029) 0:00:06.407 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.059) 
0:00:06.466 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.031) 0:00:06.497 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.033) 0:00:06.531 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.030) 0:00:06.561 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.029) 0:00:06.590 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.029) 0:00:06.620 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.029) 0:00:06.650 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.028) 0:00:06.678 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.030) 0:00:06.709 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle 
secrets] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.026) 0:00:06.736 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.027) 0:00:06.763 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 25 January 2025 11:36:04 -0500 (0:00:00.025) 0:00:06.789 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.084) 0:00:06.873 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.039) 0:00:06.912 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.038) 0:00:06.951 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.030) 0:00:06.982 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.052) 
0:00:07.035 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.061) 0:00:07.096 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.034) 0:00:07.130 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.032) 0:00:07.163 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.041) 0:00:07.204 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.392) 0:00:07.597 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.032) 0:00:07.629 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and 
subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.035) 0:00:07.665 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.031) 0:00:07.697 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.031) 0:00:07.728 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.035) 0:00:07.764 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:36:05 -0500 (0:00:00.052) 0:00:07.816 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.047) 0:00:07.864 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.072) 0:00:07.937 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.059) 0:00:07.997 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false 
} TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.035) 0:00:08.033 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.037) 0:00:08.070 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.074) 0:00:08.145 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.038) 0:00:08.183 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.039) 0:00:08.222 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.097) 0:00:08.320 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.079) 0:00:08.400 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.035) 0:00:08.436 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.037) 0:00:08.474 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.036) 0:00:08.510 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.029) 0:00:08.540 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 25 January 2025 11:36:06 -0500 (0:00:00.031) 0:00:08.571 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 4096, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 25 January 2025 11:36:07 -0500 (0:00:00.518) 0:00:09.090 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 25 January 2025 11:36:07 -0500 (0:00:00.045) 0:00:09.135 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 25 January 2025 11:36:07 -0500 (0:00:00.056) 0:00:09.192 ****** changed: [managed-node3] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/etc/containers/systemd/quadlet-pod-pod.pod", "gid": 0, "group": "root", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1737822967.3900256-19327-74940849916097/.source.pod", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 25 January 2025 11:36:08 -0500 (0:00:00.835) 0:00:10.028 
****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 25 January 2025 11:36:09 -0500 (0:00:01.037) 0:00:11.065 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target basic.target system.slice -.mount systemd-journald.socket sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893915648", "EffectiveMemoryMax": "3893915648", "EffectiveTasksMax": "4417", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid 
--pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14725", "LimitNPROCSoft": "14725", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14725", "LimitSIGPENDINGSoft": "14725", "LimitSTACK": "infinity", "LimitSTACKSoft": 
"8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3495100416", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", 
"StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4417", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 25 January 2025 11:36:10 -0500 (0:00:01.361) 0:00:12.427 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:36:10 -0500 (0:00:00.053) 0:00:12.481 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:36:10 -0500 (0:00:00.073) 0:00:12.554 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:36:10 -0500 (0:00:00.064) 0:00:12.619 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:36:10 -0500 (0:00:00.052) 0:00:12.672 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:36:10 -0500 (0:00:00.080) 0:00:12.752 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:36:10 -0500 (0:00:00.103) 0:00:12.856 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.065) 0:00:12.921 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.060) 0:00:12.981 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.067) 0:00:13.048 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.410) 0:00:13.459 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:36:11 -0500 
(0:00:00.033) 0:00:13.492 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.040) 0:00:13.533 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.059) 0:00:13.592 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.038) 0:00:13.631 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.039) 0:00:13.671 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.034) 0:00:13.705 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.033) 0:00:13.738 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.039) 0:00:13.778 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 25 January 2025 11:36:11 -0500 (0:00:00.062) 0:00:13.841 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.035) 0:00:13.876 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.031) 0:00:13.908 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.092) 0:00:14.000 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.080) 0:00:14.080 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.036) 0:00:14.116 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.110) 0:00:14.227 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.056) 0:00:14.283 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.030) 0:00:14.313 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.033) 0:00:14.346 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.030) 0:00:14.377 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 25 January 2025 11:36:12 -0500 (0:00:00.027) 0:00:14.404 ****** changed: [managed-node3] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 25 January 2025 11:36:14 -0500 (0:00:01.642) 0:00:16.046 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 4096, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 25 January 2025 11:36:14 -0500 (0:00:00.523) 0:00:16.570 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 25 January 2025 11:36:14 -0500 (0:00:00.066) 0:00:16.637 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 25 January 2025 11:36:14 -0500 (0:00:00.077) 0:00:16.714 ****** changed: [managed-node3] => { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": 
"/etc/containers/systemd/quadlet-pod-container.container", "gid": 0, "group": "root", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1737822974.913526-19573-58279194552398/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 25 January 2025 11:36:15 -0500 (0:00:00.878) 0:00:17.593 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 25 January 2025 11:36:16 -0500 (0:00:00.952) 0:00:18.545 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket quadlet-pod-pod-pod.service -.mount sysinit.target network-online.target basic.target system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893915648", "EffectiveMemoryMax": "3893915648", "EffectiveTasksMax": "4417", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service 
SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", 
"LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14725", "LimitNPROCSoft": "14725", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14725", "LimitSIGPENDINGSoft": "14725", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3474518016", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", 
"StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4417", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "quadlet-pod-pod-pod.service multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 25 January 2025 11:36:17 -0500 (0:00:00.745) 0:00:19.291 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 25 January 2025 11:36:17 -0500 (0:00:00.045) 0:00:19.337 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 25 January 2025 11:36:17 -0500 (0:00:00.029) 0:00:19.366 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 25 January 2025 11:36:17 -0500 (0:00:00.037) 0:00:19.404 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:40 Saturday 25 January 2025 11:36:17 -0500 (0:00:00.083) 0:00:19.488 ****** ok: [managed-node3] => (item=quadlet-pod-container.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:00.003280", "end": "2025-01-25 
11:36:17.955756", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-01-25 11:36:17.952476" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/libpod/testimage:20210610 ContainerName=quadlet-pod-container Pod=quadlet-pod-pod.pod Exec=/bin/busybox-extras httpd -f -p 80 ok: [managed-node3] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:00.003283", "end": "2025-01-25 11:36:18.312973", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-01-25 11:36:18.309690" } STDOUT: # # Ansible managed # # system_role:podman [Pod] PodName=quadlet-pod TASK [Check pod] *************************************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:49 Saturday 25 January 2025 11:36:18 -0500 (0:00:00.760) 0:00:20.249 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.043589", "end": "2025-01-25 11:36:18.749841", "failed_when_result": false, "rc": 0, "start": "2025-01-25 11:36:18.706252" } STDOUT: quadlet-pod-infra quadlet-pod-container TASK [Create user for testing] ************************************************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:57 Saturday 25 January 2025 11:36:18 -0500 (0:00:00.433) 0:00:20.682 ****** changed: [managed-node3] => { "changed": true, "comment": "", "create_home": true, "group": 2223, "home": "/home/user_quadlet_pod", "name": "user_quadlet_pod", "shell": "/bin/bash", "state": "present", "system": false, "uid": 2223 } TASK [Run the role - user] ***************************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:62 Saturday 25 January 2025 11:36:19 -0500 (0:00:00.689) 0:00:21.371 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 25 January 2025 11:36:19 -0500 (0:00:00.170) 0:00:21.542 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 25 January 2025 11:36:19 -0500 (0:00:00.056) 0:00:21.598 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 25 January 2025 11:36:19 -0500 (0:00:00.042) 0:00:21.641 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 25 January 2025 11:36:19 -0500 (0:00:00.038) 0:00:21.680 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 25 January 2025 11:36:19 -0500 (0:00:00.033) 0:00:21.713 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 25 January 2025 11:36:19 -0500 (0:00:00.029) 0:00:21.743 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 25 January 2025 11:36:19 -0500 (0:00:00.029) 0:00:21.772 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 25 January 2025 11:36:19 -0500 (0:00:00.065) 0:00:21.838 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 25 January 2025 11:36:20 -0500 (0:00:01.006) 0:00:22.844 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.054) 0:00:22.898 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.060) 0:00:22.959 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.051) 0:00:23.011 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.056) 0:00:23.067 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.051) 0:00:23.119 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028041", "end": "2025-01-25 11:36:21.628642", "rc": 0, "start": "2025-01-25 11:36:21.600601" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.438) 0:00:23.557 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.034) 0:00:23.592 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.029) 0:00:23.621 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 25 January 2025 11:36:21 -0500 
(0:00:00.033) 0:00:23.655 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.042) 0:00:23.698 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.067) 0:00:23.765 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 25 January 2025 11:36:21 -0500 (0:00:00.054) 0:00:23.819 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:36:22 -0500 (0:00:00.074) 0:00:23.894 ****** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "user_quadlet_pod": [ "x", "2223", "2223", "", "/home/user_quadlet_pod", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:36:22 -0500 (0:00:00.397) 0:00:24.292 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:36:22 -0500 (0:00:00.041) 0:00:24.334 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:36:22 -0500 (0:00:00.046) 0:00:24.380 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", 
"ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:36:22 -0500 (0:00:00.400) 0:00:24.781 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004039", "end": "2025-01-25 11:36:23.245135", "rc": 0, "start": "2025-01-25 11:36:23.241096" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:36:23 -0500 (0:00:00.398) 0:00:25.179 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005906", "end": "2025-01-25 11:36:23.650322", "rc": 0, "start": "2025-01-25 11:36:23.644416" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:36:23 -0500 (0:00:00.401) 0:00:25.581 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:36:23 -0500 (0:00:00.049) 0:00:25.630 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:36:23 -0500 (0:00:00.032) 0:00:25.663 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:36:23 -0500 (0:00:00.030) 0:00:25.694 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail 
if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:36:23 -0500 (0:00:00.031) 0:00:25.726 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:36:23 -0500 (0:00:00.036) 0:00:25.762 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 25 January 2025 11:36:23 -0500 (0:00:00.032) 0:00:25.795 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/root/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/root/.config/containers/policy.json", "__podman_registries_conf_file": "/root/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/root/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 25 January 2025 11:36:23 -0500 (0:00:00.040) 0:00:25.835 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.056) 0:00:25.892 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.030) 0:00:25.922 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.034) 0:00:25.956 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.057) 0:00:26.013 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | 
length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.031) 0:00:26.044 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.031) 0:00:26.076 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.062) 0:00:26.138 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.032) 0:00:26.170 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.031) 0:00:26.201 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.061) 0:00:26.262 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.075) 0:00:26.338 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.033) 0:00:26.372 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.031) 0:00:26.404 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.039) 0:00:26.443 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.031) 0:00:26.475 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.031) 0:00:26.507 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.037) 0:00:26.545 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.028) 0:00:26.573 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.028) 0:00:26.602 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.028) 0:00:26.630 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.027) 0:00:26.657 ****** included: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.087) 0:00:26.744 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.042) 0:00:26.787 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.039) 0:00:26.827 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:36:24 -0500 (0:00:00.030) 0:00:26.858 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:36:25 -0500 (0:00:00.045) 0:00:26.904 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:36:25 -0500 (0:00:00.062) 0:00:26.967 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:36:25 -0500 (0:00:00.034) 0:00:27.001 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:36:25 -0500 (0:00:00.035) 0:00:27.036 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:36:25 -0500 (0:00:00.043) 0:00:27.080 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:36:25 -0500 (0:00:00.394) 0:00:27.474 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004510", "end": "2025-01-25 11:36:25.935938", "rc": 0, "start": "2025-01-25 11:36:25.931428" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.396) 0:00:27.871 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006153", "end": "2025-01-25 11:36:26.340526", "rc": 0, "start": "2025-01-25 11:36:26.334373" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.400) 0:00:28.271 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.048) 0:00:28.320 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } 
TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.032) 0:00:28.353 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.031) 0:00:28.385 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.032) 0:00:28.417 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.043) 0:00:28.461 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.035) 0:00:28.496 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.054) 0:00:28.551 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.033) 0:00:28.584 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.032) 0:00:28.617 ****** ok: [managed-node3] => { "ansible_facts": { 
"__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.074) 0:00:28.691 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.043) 0:00:28.735 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.031) 0:00:28.766 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 25 January 2025 11:36:26 -0500 (0:00:00.066) 0:00:28.833 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 25 January 2025 11:36:27 -0500 (0:00:00.053) 0:00:28.886 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": "0:00:00.018913", "end": "2025-01-25 11:36:27.370590", "rc": 0, "start": "2025-01-25 11:36:27.351677" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 25 January 2025 11:36:27 -0500 (0:00:00.455) 0:00:29.342 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 25 January 2025 11:36:27 -0500 (0:00:00.039) 0:00:29.381 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 25 January 2025 11:36:27 -0500 (0:00:00.033) 0:00:29.415 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } 
TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 25 January 2025 11:36:27 -0500 (0:00:00.027) 0:00:29.443 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 25 January 2025 11:36:27 -0500 (0:00:00.033) 0:00:29.477 ****** changed: [managed-node3] => { "changed": true, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 4096, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 25 January 2025 11:36:28 -0500 (0:00:00.407) 0:00:29.884 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 25 January 2025 11:36:28 -0500 (0:00:00.036) 0:00:29.921 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 25 January 2025 11:36:28 -0500 (0:00:00.031) 0:00:29.952 ****** changed: [managed-node3] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1737822988.1400375-20005-238412978124666/.source.pod", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 25 January 2025 11:36:28 -0500 (0:00:00.740) 0:00:30.693 ****** [WARNING]: Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. 
To avoid this, create the remote_tmp dir with the correct permissions manually ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 25 January 2025 11:36:29 -0500 (0:00:00.697) 0:00:31.390 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount basic.target app.slice podman-user-wait-network-online.service run-user-2223.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/user/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893915648", "EffectiveMemoryMax": "3893915648", "EffectiveTasksMax": "4417", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid 
--pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14725", "LimitNPROCSoft": "14725", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14725", 
"LimitSIGPENDINGSoft": "14725", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3886493696", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/user/2223/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": 
"infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4417", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 25 January 2025 11:36:30 -0500 (0:00:01.444) 0:00:32.835 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.036) 0:00:32.872 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.041) 0:00:32.913 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.040) 0:00:32.954 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.036) 0:00:32.990 
****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.068) 0:00:33.058 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.106) 0:00:33.164 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.047) 0:00:33.211 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.039) 0:00:33.251 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.049) 0:00:33.300 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:36:31 -0500 (0:00:00.407) 0:00:33.707 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004446", "end": "2025-01-25 11:36:32.188334", "rc": 0, "start": "2025-01-25 11:36:32.183888" } STDOUT: 
0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:36:32 -0500 (0:00:00.409) 0:00:34.117 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006887", "end": "2025-01-25 11:36:32.587568", "rc": 0, "start": "2025-01-25 11:36:32.580681" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:36:32 -0500 (0:00:00.409) 0:00:34.526 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:36:32 -0500 (0:00:00.098) 0:00:34.624 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:36:32 -0500 (0:00:00.034) 0:00:34.658 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:36:32 -0500 (0:00:00.032) 0:00:34.691 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:36:32 -0500 (0:00:00.032) 0:00:34.723 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:36:32 -0500 (0:00:00.032) 0:00:34.756 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 25 January 2025 11:36:32 -0500 (0:00:00.032) 0:00:34.788 ****** ok: [managed-node3] => { 
"ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 25 January 2025 11:36:32 -0500 (0:00:00.061) 0:00:34.849 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.043) 0:00:34.892 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.049) 0:00:34.942 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.085) 0:00:35.028 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.045) 0:00:35.073 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.034) 0:00:35.108 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.079) 0:00:35.188 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.057) 0:00:35.245 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": null, "end": null, "rc": 0, "start": null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_pod exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_pod' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.390) 0:00:35.636 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.047) 0:00:35.684 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.037) 0:00:35.722 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 25 January 2025 11:36:33 -0500 (0:00:00.031) 0:00:35.753 ****** changed: [managed-node3] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 25 January 2025 11:36:35 -0500 (0:00:01.777) 0:00:37.531 ****** ok: [managed-node3] => { "changed": false, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 4096, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 25 January 2025 11:36:36 -0500 (0:00:00.429) 0:00:37.960 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 25 January 2025 11:36:36 -0500 (0:00:00.038) 0:00:37.999 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 25 January 2025 11:36:36 -0500 (0:00:00.041) 0:00:38.041 ****** changed: [managed-node3] => { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1737822996.2324553-20266-105628304988263/.source.container", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 25 January 2025 11:36:36 -0500 (0:00:00.731) 0:00:38.772 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 25 January 2025 11:36:37 -0500 (0:00:00.717) 0:00:39.490 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount run-user-2223.mount podman-user-wait-network-online.service quadlet-pod-pod-pod.service basic.target app.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target default.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", 
"ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/user/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893915648", "EffectiveMemoryMax": "3893915648", "EffectiveTasksMax": "4417", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": 
"quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14725", "LimitNPROCSoft": "14725", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14725", "LimitSIGPENDINGSoft": "14725", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3857285120", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", 
"RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4417", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target quadlet-pod-pod-pod.service", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 25 January 2025 11:36:38 -0500 (0:00:00.775) 0:00:40.265 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 25 January 2025 11:36:38 -0500 (0:00:00.032) 0:00:40.298 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 25 January 2025 11:36:38 -0500 (0:00:00.027) 0:00:40.326 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK 
[fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 25 January 2025 11:36:38 -0500 (0:00:00.032) 0:00:40.358 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:70 Saturday 25 January 2025 11:36:38 -0500 (0:00:00.044) 0:00:40.402 ****** ok: [managed-node3] => (item=quadlet-pod-container.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:00.003417", "end": "2025-01-25 11:36:38.871649", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-01-25 11:36:38.868232" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/libpod/testimage:20210610 ContainerName=quadlet-pod-container Pod=quadlet-pod-pod.pod Exec=/bin/busybox-extras httpd -f -p 80 ok: [managed-node3] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:00.003128", "end": "2025-01-25 11:36:39.233512", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-01-25 11:36:39.230384" } STDOUT: # # Ansible managed # # system_role:podman [Pod] PodName=quadlet-pod TASK [Check pod] *************************************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:79 Saturday 25 January 2025 11:36:39 -0500 (0:00:00.761) 0:00:41.163 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.064764", "end": "2025-01-25 11:36:39.770841", "failed_when_result": false, "rc": 0, "start": "2025-01-25 11:36:39.706077" } STDOUT: quadlet-pod-infra quadlet-pod-container TASK [Ensure linger] *********************************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:91 Saturday 25 January 2025 11:36:39 -0500 (0:00:00.574) 0:00:41.737 ****** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1737822987.3613405, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1737822987.3613405, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 152, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1737822987.3613405, "nlink": 1, "path": "/var/lib/systemd/linger/user_quadlet_pod", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "1330913710", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Cleanup user] 
TASK [Cleanup user] ************************************************************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:99
Saturday 25 January 2025 11:36:40 -0500 (0:00:00.435) 0:00:42.173 ******
included: fedora.linux_system_roles.podman for managed-node3

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 25 January 2025 11:36:40 -0500 (0:00:00.119) 0:00:42.293 ******
included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 25 January 2025 11:36:40 -0500 (0:00:00.052) 0:00:42.345 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 25 January 2025 11:36:40 -0500 (0:00:00.039) 0:00:42.384 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 25 January 2025 11:36:40 -0500 (0:00:00.031) 0:00:42.416 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 25 January 2025 11:36:40 -0500 (0:00:00.030) 0:00:42.446 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 25 January 2025 11:36:40 -0500 (0:00:00.030) 0:00:42.477 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 25 January 2025 11:36:40 -0500 (0:00:00.030) 0:00:42.508 ******
ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" }
ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": {
"__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 25 January 2025 11:36:40 -0500 (0:00:00.069) 0:00:42.577 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 25 January 2025 11:36:41 -0500 (0:00:01.027) 0:00:43.605 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 25 January 2025 11:36:41 -0500 (0:00:00.042) 0:00:43.647 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 25 January 2025 11:36:41 -0500 (0:00:00.060) 0:00:43.708 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 25 January 2025 11:36:41 -0500 (0:00:00.047) 0:00:43.755 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 25 January 2025 11:36:41 -0500 (0:00:00.040) 0:00:43.796 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 25 January 2025 11:36:41 -0500 (0:00:00.036) 0:00:43.833 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028353", 
"end": "2025-01-25 11:36:42.316183", "rc": 0, "start": "2025-01-25 11:36:42.287830" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 25 January 2025 11:36:42 -0500 (0:00:00.446) 0:00:44.280 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 25 January 2025 11:36:42 -0500 (0:00:00.070) 0:00:44.351 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 25 January 2025 11:36:42 -0500 (0:00:00.067) 0:00:44.418 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 25 January 2025 11:36:42 -0500 (0:00:00.083) 0:00:44.502 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 25 January 2025 11:36:42 -0500 (0:00:00.072) 0:00:44.574 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 25 January 2025 11:36:42 -0500 (0:00:00.093) 0:00:44.668 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 25 January 2025 11:36:42 -0500 (0:00:00.100) 0:00:44.768 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 25 January 2025 11:36:43 -0500 (0:00:00.130) 0:00:44.899 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 25 January 2025 11:36:43 -0500 (0:00:00.043) 0:00:44.943 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 25 January 2025 11:36:43 -0500 (0:00:00.039) 0:00:44.982 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 25 January 2025 11:36:43 -0500 (0:00:00.043) 0:00:45.026 ******
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 25 January 2025 11:36:43 -0500 (0:00:00.414) 0:00:45.440 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004111", "end": "2025-01-25 11:36:43.910139", "rc": 0, "start": "2025-01-25 11:36:43.906028" }

STDOUT:

0: user_quadlet_pod 720896 65536

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 25 January 2025 11:36:44 -0500 (0:00:00.432) 0:00:45.872 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006522", "end": "2025-01-25 11:36:44.372261", "rc": 0, "start": "2025-01-25 11:36:44.365739" }

STDOUT:

0: user_quadlet_pod 720896 65536
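The two getsubids results above use the output format "index: user first_id count", so user_quadlet_pod owns subordinate UIDs and GIDs 720896 through 786431 (720896 + 65536 - 1); the same allocation would appear in /etc/subuid and /etc/subgid as user_quadlet_pod:720896:65536. A standalone check equivalent to what the role records here could look like the following sketch (task names and the 65536 threshold are assumptions, not taken from the role):

- hosts: all
  tasks:
    - name: Query subordinate UID allocation
      ansible.builtin.command: getsubids user_quadlet_pod
      register: __subuids
      changed_when: false

    - name: Query subordinate GID allocation
      ansible.builtin.command: getsubids -g user_quadlet_pod
      register: __subgids
      changed_when: false

    # Output format is "index: user first_id count", so field 3 is the count.
    - name: Require a full 65536-ID range for rootless podman
      ansible.builtin.assert:
        that:
          - (__subuids.stdout.split()[3] | int) >= 65536
          - (__subgids.stdout.split()[3] | int) >= 65536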
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 25 January 2025 11:36:44 -0500 (0:00:00.496) 0:00:46.369 ******
ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false }

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 25 January 2025 11:36:44 -0500 (0:00:00.129) 0:00:46.498 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 25 January 2025 11:36:44 -0500 (0:00:00.060) 0:00:46.558 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 25 January 2025 11:36:44 -0500 (0:00:00.065) 0:00:46.623 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 25 January 2025 11:36:44 -0500 (0:00:00.071) 0:00:46.695 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 25 January 2025 11:36:44 -0500 (0:00:00.090) 0:00:46.785 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 25 January 2025 11:36:44 -0500 (0:00:00.055) 0:00:46.840 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 25 January 2025 11:36:45 -0500
(0:00:00.088) 0:00:46.929 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.097) 0:00:47.027 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.060) 0:00:47.087 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.060) 0:00:47.148 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.098) 0:00:47.246 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.061) 0:00:47.308 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.076) 0:00:47.384 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.134) 0:00:47.519 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.057) 0:00:47.577 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.060) 0:00:47.637 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.105) 0:00:47.743 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.055) 0:00:47.799 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 25 January 2025 11:36:45 -0500 (0:00:00.057) 0:00:47.857 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.069) 0:00:47.927 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.074) 0:00:48.001 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.060) 0:00:48.062 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.062) 0:00:48.124 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.056) 0:00:48.180 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.051) 0:00:48.232 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.055) 0:00:48.287 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.043) 0:00:48.331 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.047) 0:00:48.378 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.175) 0:00:48.554 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.101) 0:00:48.656 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.070) 0:00:48.727 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:36:46 -0500 (0:00:00.073) 0:00:48.800 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:36:47 -0500 (0:00:00.105) 0:00:48.905 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:36:47 -0500 (0:00:00.104) 0:00:49.010 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:36:47 -0500 (0:00:00.070) 0:00:49.080 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:36:47 -0500 (0:00:00.080) 0:00:49.161 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:36:47 -0500 (0:00:00.086) 0:00:49.247 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, 
"version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:36:47 -0500 (0:00:00.434) 0:00:49.681 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004060", "end": "2025-01-25 11:36:48.175831", "rc": 0, "start": "2025-01-25 11:36:48.171771" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.424) 0:00:50.106 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006327", "end": "2025-01-25 11:36:48.573058", "rc": 0, "start": "2025-01-25 11:36:48.566731" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.402) 0:00:50.508 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.048) 0:00:50.557 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.032) 0:00:50.589 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.031) 0:00:50.621 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.036) 0:00:50.658 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.037) 0:00:50.695 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.040) 0:00:50.736 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.067) 0:00:50.803 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 25 January 2025 11:36:48 -0500 (0:00:00.049) 0:00:50.853 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 25 January 2025 11:36:49 -0500 (0:00:00.050) 0:00:50.903 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 25 January 2025 11:36:49 -0500 (0:00:00.141) 0:00:51.045 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 25 January 2025 11:36:49 -0500 (0:00:00.068) 0:00:51.114 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 25 January 2025 11:36:49 -0500 (0:00:00.137) 0:00:51.251 ****** ok: [managed-node3] => { 
"changed": false, "stat": { "atime": 1737822987.3983405, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737822998.2703187, "dev": 86, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737822998.2703187, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 220, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 25 January 2025 11:36:49 -0500 (0:00:00.431) 0:00:51.683 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-25 11:36:38 EST", "ActiveEnterTimestampMonotonic": "694366856", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "-.mount run-user-2223.mount podman-user-wait-network-online.service quadlet-pod-pod-pod.service basic.target app.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-25 11:36:38 EST", "AssertTimestampMonotonic": "694247313", "Before": "shutdown.target default.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "91828000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-25 11:36:38 EST", "ConditionTimestampMonotonic": "694247307", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/user.slice/user-2223.slice/user@2223.service/app.slice/quadlet-pod-container.service", "ControlGroupId": "13797", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", 
"Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/user/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893915648", "EffectiveMemoryMax": "3893915648", "EffectiveTasksMax": "4417", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "71550", "ExecMainStartTimestamp": "Sat 2025-01-25 11:36:38 EST", "ExecMainStartTimestampMonotonic": "694366787", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Sat 2025-01-25 11:36:38 EST] ; stop_time=[n/a] ; pid=71540 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Sat 2025-01-25 11:36:38 EST] ; stop_time=[n/a] ; pid=71540 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-25 11:36:38 EST", "InactiveExitTimestampMonotonic": "694254314", "InvocationID": 
"98fc03400df24a2aa70cb6ea2f374f2d", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14725", "LimitNPROCSoft": "14725", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14725", "LimitSIGPENDINGSoft": "14725", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "71550", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3856330752", "MemoryCurrent": "888832", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "17850368", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-25 11:36:38 EST", "StateChangeTimestampMonotonic": "694366856", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "4417", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target quadlet-pod-pod-pod.service", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 25 January 2025 11:37:01 -0500 (0:00:11.188) 0:01:02.872 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822997.4673204, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1737822996.8423216, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 393224, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737822996.5683222, "nlink": 1, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 2223, "version": "244385697", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK 
[fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 25 January 2025 11:37:01 -0500 (0:00:00.447) 0:01:03.319 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 25 January 2025 11:37:01 -0500 (0:00:00.097) 0:01:03.417 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 25 January 2025 11:37:02 -0500 (0:00:00.791) 0:01:04.209 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 25 January 2025 11:37:02 -0500 (0:00:00.084) 0:01:04.293 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 25 January 2025 11:37:02 -0500 (0:00:00.058) 0:01:04.351 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 25 January 2025 11:37:02 -0500 (0:00:00.054) 0:01:04.406 ****** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 25 January 2025 11:37:03 -0500 (0:00:00.465) 0:01:04.871 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 25 January 2025 11:37:03 -0500 (0:00:00.785) 0:01:05.657 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 25 January 2025 11:37:04 -0500 
(0:00:00.705) 0:01:06.362 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 25 January 2025 11:37:04 -0500 (0:00:00.132) 0:01:06.495 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 25 January 2025 11:37:04 -0500 (0:00:00.099) 0:01:06.594 ******
changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.082599", "end": "2025-01-25 11:37:05.313384", "rc": 0, "start": "2025-01-25 11:37:05.230785" }

STDOUT:

b63ae9894c3426614aeeb89e44264db6c08f781905e8f1ec50a2d330d34ce01d
9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 25 January 2025 11:37:05 -0500 (0:00:00.686) 0:01:07.281 ******
included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 25 January 2025 11:37:05 -0500 (0:00:00.112) 0:01:07.394 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 25 January 2025 11:37:05 -0500 (0:00:00.040) 0:01:07.434 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 25 January 2025 11:37:05 -0500 (0:00:00.037) 0:01:07.472 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 25 January 2025 11:37:05 -0500 (0:00:00.050) 0:01:07.523 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.048809", "end": "2025-01-25 11:37:06.141093", "rc": 0, "start": "2025-01-25 11:37:06.092284" }
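Taken together, the cleanup steps logged above (stop and disable the generated unit, remove the quadlet source file, reload the user's systemd instance, prune unused images) could be reproduced by hand with tasks like the following sketch; the unit name, file path, and XDG_RUNTIME_DIR value come from this run, while the module choices are assumptions rather than the role's actual implementation:

- hosts: all
  become: true
  become_user: user_quadlet_pod
  environment:
    # Needed so the user's systemd instance and rootless podman are reachable.
    XDG_RUNTIME_DIR: /run/user/2223
  tasks:
    - name: Stop and disable the generated container unit
      ansible.builtin.systemd_service:
        name: quadlet-pod-container.service
        state: stopped
        enabled: false
        scope: user

    - name: Remove the quadlet source file
      ansible.builtin.file:
        path: /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container
        state: absent

    - name: Re-run the quadlet generator so the unit disappears
      ansible.builtin.systemd_service:
        daemon_reload: true
        scope: user

    - name: Prune images no longer in use
      ansible.builtin.command: podman image prune --all -f
      register: __prune
      changed_when: __prune.stdout | length > 0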
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 25 January 2025  11:37:06 -0500 (0:00:00.605)       0:01:08.128 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.049264", "end": "2025-01-25 11:37:06.831962", "rc": 0, "start": "2025-01-25 11:37:06.782698" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 25 January 2025  11:37:06 -0500 (0:00:00.675)       0:01:08.804 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.047462", "end": "2025-01-25 11:37:07.436036", "rc": 0, "start": "2025-01-25 11:37:07.388574" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 25 January 2025  11:37:07 -0500 (0:00:00.580)       0:01:09.384 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.050375", "end": "2025-01-25 11:37:08.023123", "rc": 0, "start": "2025-01-25 11:37:07.972748" }

STDOUT:

podman

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 25 January 2025  11:37:08 -0500 (0:00:00.640)       0:01:10.025 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 25 January 2025  11:37:08 -0500 (0:00:00.631)       0:01:10.656 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 25 January 2025  11:37:09 -0500 (0:00:00.586)       0:01:11.242 ******
ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source":
"systemd", "state": "inactive", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", 
"source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf5-makecache.service": { "name": "dnf5-makecache.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction-cleanup.service": { "name": "dnf5-offline-transaction-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction.service": { "name": "dnf5-offline-transaction.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": 
"systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "running", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": 
"sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-bsod.service": { "name": "systemd-bsod.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed-firstboot.service": { "name": "systemd-homed-firstboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-mountfsd.service": { "name": "systemd-mountfsd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-persistent-storage.service": { "name": "systemd-networkd-persistent-storage.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-nsresourced.service": { "name": "systemd-nsresourced.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": 
"systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-storagetm.service": { "name": "systemd-storagetm.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@2223.service": { "name": "user-runtime-dir@2223.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", 
"status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@2223.service": { "name": "user@2223.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:37:12 -0500 (0:00:02.905) 0:01:14.148 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:37:12 -0500 (0:00:00.058) 0:01:14.206 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:37:12 -0500 (0:00:00.061) 0:01:14.268 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:37:12 -0500 (0:00:00.050) 0:01:14.319 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:37:12 -0500 (0:00:00.044) 0:01:14.363 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:37:12 -0500 (0:00:00.057) 0:01:14.420 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:37:12 -0500 (0:00:00.069) 0:01:14.490 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does 
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 25 January 2025  11:37:12 -0500 (0:00:00.042)       0:01:14.532 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 25 January 2025  11:37:12 -0500 (0:00:00.040)       0:01:14.573 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 25 January 2025  11:37:12 -0500 (0:00:00.060)       0:01:14.633 ******
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 25 January 2025  11:37:13 -0500 (0:00:00.420)       0:01:15.054 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004057", "end": "2025-01-25 11:37:13.523298", "rc": 0, "start": "2025-01-25 11:37:13.519241" }

STDOUT:

0: user_quadlet_pod 720896 65536

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 25 January 2025  11:37:13 -0500 (0:00:00.405)       0:01:15.460 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006104", "end": "2025-01-25 11:37:13.955184", "rc": 0, "start": "2025-01-25 11:37:13.949080" }

STDOUT:

0: user_quadlet_pod 720896 65536

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.428)       0:01:15.888 ******
ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false }
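getsubids prints one range per line as "index: user start count", so user_quadlet_pod may use 65536 subordinate IDs starting at 720896; rootless podman maps these into the container's user namespace. By hand, and assuming the range comes from the conventional /etc/subuid file rather than an identity server:

    getsubids user_quadlet_pod              # 0: user_quadlet_pod 720896 65536
    getsubids -g user_quadlet_pod           # same range for subordinate GIDs
    grep '^user_quadlet_pod:' /etc/subuid   # user_quadlet_pod:720896:65536 (assumed file form)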
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.056)       0:01:15.945 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.037)       0:01:15.983 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.036)       0:01:16.020 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.035)       0:01:16.056 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.039)       0:01:16.095 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.035)       0:01:16.131 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.058)       0:01:16.190 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false }
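Part 3 fixes the rootless execution context: quadlet names the unit for a foo.pod file foo-pod.service, hence quadlet-pod-pod-pod.service from quadlet-pod-pod.pod, and user-scope systemd calls need the user's XDG_RUNTIME_DIR. A hand-run equivalent of the check the role performs next would be (hypothetical invocation; the role itself goes through the systemd module with become):

    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 \
        systemctl --user status quadlet-pod-pod-pod.service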
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.035)       0:01:16.226 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.037)       0:01:16.263 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.119)       0:01:16.382 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.048)       0:01:16.431 ******
included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 25 January 2025  11:37:14 -0500 (0:00:00.081)       0:01:16.512 ******
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822987.3983405, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737823020.9632733, "dev": 86, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737823020.9632733, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 25 January 2025  11:37:15 -0500 (0:00:00.407)       0:01:16.920 ******
changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-pod-pod.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "app.slice -.mount run-user-2223.mount podman-user-wait-network-online.service basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/user/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893915648", "EffectiveMemoryMax": "3893915648", "EffectiveTasksMax": "4417", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; 
start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14725", "LimitNPROCSoft": "14725", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14725", "LimitSIGPENDINGSoft": "14725", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3884122112", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": 
"0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/user/2223/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4417", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : 
See if quadlet file exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 25 January 2025 11:37:15 -0500 (0:00:00.737) 0:01:17.657 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822989.3973365, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "ctime": 1737822988.7533376, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 393223, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737822988.4733384, "nlink": 1, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 70, "uid": 2223, "version": "2630022445", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 25 January 2025 11:37:16 -0500 (0:00:00.427) 0:01:18.084 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 25 January 2025 11:37:16 -0500 (0:00:00.071) 0:01:18.156 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 25 January 2025 11:37:16 -0500 (0:00:00.401) 0:01:18.558 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 25 January 2025 11:37:16 -0500 (0:00:00.058) 0:01:18.616 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 25 January 2025 11:37:16 -0500 (0:00:00.056) 0:01:18.673 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 25 January 2025 11:37:16 -0500 (0:00:00.055) 0:01:18.728 
****** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 25 January 2025 11:37:17 -0500 (0:00:00.420) 0:01:19.149 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 25 January 2025 11:37:18 -0500 (0:00:00.721) 0:01:19.870 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 25 January 2025 11:37:18 -0500 (0:00:00.544) 0:01:20.415 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 25 January 2025 11:37:18 -0500 (0:00:00.056) 0:01:20.472 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 25 January 2025 11:37:18 -0500 (0:00:00.036) 0:01:20.508 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.046369", "end": "2025-01-25 11:37:19.095667", "rc": 0, "start": "2025-01-25 11:37:19.049298" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 25 January 2025 11:37:19 -0500 (0:00:00.526) 0:01:21.034 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 25 January 2025 11:37:19 -0500 (0:00:00.067) 0:01:21.102 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 25 January 2025 11:37:19 -0500 (0:00:00.037) 0:01:21.139 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 25 January 2025 11:37:19 -0500 (0:00:00.037) 0:01:21.176 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 25 January 2025 11:37:19 -0500 (0:00:00.046) 0:01:21.223 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.049723", "end": "2025-01-25 11:37:19.818104", "rc": 0, "start": "2025-01-25 11:37:19.768381" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 25 January 2025 11:37:19 -0500 (0:00:00.533) 0:01:21.756 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.046716", "end": "2025-01-25 11:37:20.348444", "rc": 0, "start": "2025-01-25 11:37:20.301728" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 25 January 2025 11:37:20 -0500 (0:00:00.531) 0:01:22.288 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.047987", "end": "2025-01-25 11:37:20.886074", "rc": 0, "start": "2025-01-25 11:37:20.838087" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 25 January 2025 11:37:20 -0500 (0:00:00.562) 0:01:22.850 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.044907", "end": "2025-01-25 11:37:21.479424", "rc": 0, "start": "2025-01-25 11:37:21.434517" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 25 January 2025 11:37:21 -0500 (0:00:00.572) 0:01:23.422 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 25 January 2025 11:37:22 -0500 (0:00:00.580) 0:01:24.003 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 25 January 2025 11:37:22 -0500 (0:00:00.581) 0:01:24.585 ****** ok: [managed-node3] => { "ansible_facts": { "services": { 
"NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, 
"dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf5-makecache.service": { "name": "dnf5-makecache.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction-cleanup.service": { "name": "dnf5-offline-transaction-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction.service": { "name": "dnf5-offline-transaction.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "running", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" 
}, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-bsod.service": { "name": "systemd-bsod.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed-firstboot.service": { "name": "systemd-homed-firstboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-mountfsd.service": { "name": "systemd-mountfsd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-persistent-storage.service": { "name": "systemd-networkd-persistent-storage.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-nsresourced.service": { "name": "systemd-nsresourced.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-storagetm.service": { "name": "systemd-storagetm.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@2223.service": { "name": "user-runtime-dir@2223.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@2223.service": { "name": "user@2223.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:37:25 -0500 (0:00:02.746) 0:01:27.331 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 25 January 2025 11:37:25 -0500 (0:00:00.043) 0:01:27.375 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node3 => (item=user_quadlet_pod) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Saturday 25 January 2025 11:37:25 -0500 (0:00:00.087) 0:01:27.462 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Saturday 25 January 2025 11:37:25 -0500 (0:00:00.042) 0:01:27.505 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Saturday 25 January 2025 11:37:25 -0500 (0:00:00.050) 0:01:27.556 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822987.3983405, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737823020.9632733, "dev": 86, "device_type": 0, "executable": true, "exists": true, "gid": 2223, 
"gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737823020.9632733, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Saturday 25 January 2025 11:37:26 -0500 (0:00:00.422) 0:01:27.978 ****** ok: [managed-node3] => { "changed": false, "containers": [] } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Saturday 25 January 2025 11:37:26 -0500 (0:00:00.691) 0:01:28.669 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-q" ], "delta": "0:00:00.055997", "end": "2025-01-25 11:37:27.308780", "rc": 0, "start": "2025-01-25 11:37:27.252783" } STDOUT: podman TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Saturday 25 January 2025 11:37:27 -0500 (0:00:00.586) 0:01:29.255 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "secret", "ls", "-n", "-q" ], "delta": "0:00:00.047462", "end": "2025-01-25 11:37:27.855319", "rc": 0, "start": "2025-01-25 11:37:27.807857" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Saturday 25 January 2025 11:37:27 -0500 (0:00:00.547) 0:01:29.803 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "loginctl", "disable-linger", "user_quadlet_pod" ], "delta": "0:00:00.008266", "end": "2025-01-25 11:37:28.286389", "rc": 0, "start": "2025-01-25 11:37:28.278123" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Saturday 25 January 2025 11:37:28 -0500 (0:00:00.466) 0:01:30.269 ****** fatal: [managed-node3]: FAILED! 
=> { "attempts": 1, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_pod" ], "delta": "0:00:00.006852", "end": "2025-01-25 11:37:28.754571", "rc": 1, "start": "2025-01-25 11:37:28.747719" } STDERR: Failed to get user: User ID 2223 is not logged in or lingering MSG: non-zero return code ...ignoring TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Saturday 25 January 2025 11:37:28 -0500 (0:00:00.422) 0:01:30.691 ****** changed: [managed-node3] => { "changed": true, "name": "systemd-logind", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-25 11:35:44 EST", "ActiveEnterTimestampMonotonic": "641043324", "ActiveExitTimestamp": "Sat 2025-01-25 11:35:44 EST", "ActiveExitTimestampMonotonic": "640532608", "ActiveState": "active", "After": "sysinit.target systemd-remount-fs.service tmp.mount -.mount system.slice basic.target modprobe@drm.service nss-user-lookup.target systemd-journald.socket dbus.socket systemd-tmpfiles-setup.service user.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-25 11:35:44 EST", "AssertTimestampMonotonic": "640988284", "Before": "session-7.scope user@0.service shutdown.target user-runtime-dir@0.service multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.freedesktop.login1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "184911000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanClean": "runtime state fdstore", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_linux_immutable cap_sys_admin cap_sys_tty_config cap_audit_control cap_mac_admin", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-25 11:35:44 EST", "ConditionTimestampMonotonic": "640988281", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/systemd-logind.service", "ControlGroupId": "10255", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "User Login Management", "DeviceAllow": "block-* r", "DevicePolicy": "auto", "Documentation": "\"man:sd-login(3)\" \"man:systemd-logind.service(8)\" \"man:logind.conf(5)\" \"man:org.freedesktop.login1(5)\"", "DropInPaths": "/usr/lib/systemd/system/systemd-logind.service.d/10-grub2-logind-service.conf /usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3893915648", "EffectiveMemoryMax": "3893915648", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "4417", "Environment": "SYSTEMD_REBOOT_TO_BOOT_LOADER_MENU=true SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-25 11:35:44 EST", 
"ExecMainHandoffTimestampMonotonic": "641029168", "ExecMainPID": "62064", "ExecMainStartTimestamp": "Sat 2025-01-25 11:35:44 EST", "ExecMainStartTimestampMonotonic": "640991517", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "768", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/systemd-logind.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPAddressDeny": "::/0 0.0.0.0/0", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-logind.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2025-01-25 11:35:44 EST", "InactiveEnterTimestampMonotonic": "640550597", "InactiveExitTimestamp": "Sat 2025-01-25 11:35:44 EST", "InactiveExitTimestampMonotonic": "640991873", "InvocationID": "5f59d037eb694b078093e10bb6e5a3fc", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "524288", "LimitNPROC": "14725", "LimitNPROCSoft": "14725", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14725", "LimitSIGPENDINGSoft": "14725", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "62064", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3476754432", "MemoryCurrent": "5963776", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "6234112", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", 
"MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "2", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "systemd-logind.service dbus-org.freedesktop.login1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "yes", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "yes", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "strict", "ReadWritePaths": "/etc /run", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/systemd/users /run/systemd/inhibit /var/lib/systemd/linger /run/systemd/shutdown /run/systemd/seats /run/systemd/sessions", "Restart": "always", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "0", "RestartUSecNext": "0", "RestrictAddressFamilies": "AF_NETLINK AF_UNIX", "RestrictNamespaces": "yes", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectory": "systemd/inhibit systemd/seats systemd/sessions systemd/shutdown systemd/users", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "yes", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-25 11:35:44 EST", "StateChangeTimestampMonotonic": "641043324", "StateDirectory": "systemd/linger", "StateDirectoryMode": "0755", "StatusErrno": "0", "StatusText": "Processing requests...", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "1", 
"SystemCallFilter": "_llseek _newselect accept accept4 access add_key alarm arch_prctl arm_fadvise64_64 bind brk cacheflush capget capset chdir chmod chown chown32 clock_getres clock_getres_time64 clock_gettime clock_gettime64 clock_nanosleep clock_nanosleep_time64 clone clone3 close close_range connect copy_file_range creat dup dup2 dup3 epoll_create epoll_create1 epoll_ctl epoll_ctl_old epoll_pwait epoll_pwait2 epoll_wait epoll_wait_old eventfd eventfd2 execve execveat exit exit_group faccessat faccessat2 fadvise64 fadvise64_64 fallocate fchdir fchmod fchmodat fchmodat2 fchown fchown32 fchownat fcntl fcntl64 fdatasync fgetxattr flistxattr flock fork fremovexattr fsetxattr fstat fstat64 fstatat64 fstatfs fstatfs64 fsync ftruncate ftruncate64 futex futex_time64 futex_waitv futimesat get_mempolicy get_robust_list get_thread_area getcpu getcwd getdents getdents64 getegid getegid32 geteuid geteuid32 getgid getgid32 getgroups getgroups32 getitimer getpeername getpgid getpgrp getpid getppid getpriority getrandom getresgid getresgid32 getresuid getresuid32 getrlimit getrusage getsid getsockname getsockopt gettid gettimeofday getuid getuid32 getxattr inotify_add_watch inotify_init inotify_init1 inotify_rm_watch io_cancel io_destroy io_getevents io_pgetevents io_pgetevents_time64 io_setup io_submit io_uring_enter io_uring_register io_uring_setup ioctl ioprio_get ioprio_set ipc kcmp keyctl kill landlock_add_rule landlock_create_ruleset landlock_restrict_self lchown lchown32 lgetxattr link linkat listen listxattr llistxattr lremovexattr lseek lsetxattr lstat lstat64 madvise mbind membarrier memfd_create migrate_pages mkdir mkdirat mknod mknodat mlock mlock2 mlockall mmap mmap2 move_pages mprotect mq_getsetattr mq_notify mq_open mq_timedreceive mq_timedreceive_time64 mq_timedsend mq_timedsend_time64 mq_unlink mremap msgctl msgget msgrcv msgsnd msync munlock munlockall munmap name_to_handle_at nanosleep newfstatat nice oldfstat oldlstat oldolduname oldstat olduname open openat openat2 pause personality pidfd_open pidfd_send_signal pipe pipe2 poll ppoll ppoll_time64 prctl pread64 preadv preadv2 prlimit64 process_madvise process_vm_readv process_vm_writev pselect6 pselect6_time64 pwrite64 pwritev pwritev2 read readahead readdir readlink readlinkat readv recv recvfrom recvmmsg recvmmsg_time64 recvmsg remap_file_pages removexattr rename renameat renameat2 request_key restart_syscall riscv_flush_icache rmdir rseq rt_sigaction rt_sigpending rt_sigprocmask rt_sigqueueinfo rt_sigreturn rt_sigsuspend rt_sigtimedwait rt_sigtimedwait_time64 rt_tgsigqueueinfo sched_get_priority_max sched_get_priority_min sched_getaffinity sched_getattr sched_getparam sched_getscheduler sched_rr_get_interval sched_rr_get_interval_time64 sched_setaffinity sched_setattr sched_setparam sched_setscheduler sched_yield seccomp select semctl semget semop semtimedop semtimedop_time64 send sendfile sendfile64 sendmmsg sendmsg sendto set_mempolicy set_mempolicy_home_node set_robust_list set_thread_area set_tid_address set_tls setfsgid setfsgid32 setfsuid setfsuid32 setgid setgid32 setgroups setgroups32 setitimer setns setpgid setpriority setregid setregid32 setresgid setresgid32 setresuid setresuid32 setreuid setreuid32 setrlimit setsid setsockopt setuid setuid32 setxattr shmat shmctl shmdt shmget shutdown sigaction sigaltstack signal signalfd signalfd4 sigpending sigprocmask sigreturn sigsuspend socket socketcall socketpair splice stat stat64 statfs statfs64 statx swapcontext symlink symlinkat sync sync_file_range sync_file_range2 syncfs 
sysinfo tee tgkill time timer_create timer_delete timer_getoverrun timer_gettime timer_gettime64 timer_settime timer_settime64 timerfd_create timerfd_gettime timerfd_gettime64 timerfd_settime timerfd_settime64 times tkill truncate truncate64 ugetrlimit umask uname unlink unlinkat unshare userfaultfd utime utimensat utimensat_time64 utimes vfork vmsplice wait4 waitid waitpid write writev", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "4417", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify-reload", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "static", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "-.mount tmp.mount dbus.socket modprobe@drm.service user.slice", "WantsMountsFor": "/var/tmp /tmp", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Saturday 25 January 2025 11:37:29 -0500 (0:00:00.627) 0:01:31.319 ****** ok: [managed-node3] => { "attempts": 1, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_pod" ], "delta": "0:00:00.088824", "end": "2025-01-25 11:37:29.873694", "failed_when_result": false, "rc": 1, "start": "2025-01-25 11:37:29.784870" } STDERR: Failed to get user: User ID 2223 is not logged in or lingering MSG: non-zero return code TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.552) 0:01:31.871 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__user_state is failed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.036) 0:01:31.908 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.055) 0:01:31.940 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:109 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.059) 0:01:31.996 ****** ok: [managed-node3] => { "changed": false } MSG: All assertions passed
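The two "Wait for user session to exit closing state" tasks above poll loginctl until the lingering user's session is gone; an rc of 1 with "not logged in or lingering" is the desired end state rather than an error, which is why the first attempt ends in "...ignoring" and the second reports failed_when_result: false. A minimal sketch of that polling loop follows; the loginctl invocation and the __user_state name are taken from the log, while the retries/delay values and failure condition are illustrative assumptions, not the role's verbatim source:

    - name: Wait for user session to exit closing state (sketch)
      ansible.builtin.command: loginctl show-user --value -p State user_quadlet_pod
      register: __user_state
      # retry while the session is still tearing down
      until: __user_state.stdout != "closing"
      retries: 3
      delay: 1
      # rc 1 ("Failed to get user: ... not logged in or lingering") means the
      # user is fully gone, which is success here, so only other rcs fail
      failed_when: __user_state.rc not in [0, 1]

TASK [Ensure no linger] ******************************************************** task path: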
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:120 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.059) 0:01:32.056 ****** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [Cleanup user] ************************************************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:159 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.397) 0:01:32.453 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.088) 0:01:32.542 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.067) 0:01:32.610 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.045) 0:01:32.655 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.036) 0:01:32.691 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.034) 0:01:32.726 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.034) 0:01:32.761 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 25 January 2025 11:37:30 -0500 (0:00:00.037) 0:01:32.798 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", 
"shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 25 January 2025 11:37:31 -0500 (0:00:00.072) 0:01:32.871 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 25 January 2025 11:37:31 -0500 (0:00:00.962) 0:01:33.833 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.036) 0:01:33.869 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.039) 0:01:33.909 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.035) 0:01:33.944 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.038) 0:01:33.982 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] 
******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.034) 0:01:34.016 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.031410", "end": "2025-01-25 11:37:32.510382", "rc": 0, "start": "2025-01-25 11:37:32.478972" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.438) 0:01:34.455 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.052) 0:01:34.508 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.062) 0:01:34.570 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.086) 0:01:34.656 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.078) 0:01:34.735 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 25 January 2025 11:37:32 -0500 (0:00:00.100) 0:01:34.835 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 25 January 2025 11:37:33 -0500 (0:00:00.107) 0:01:34.943 
****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:37:33 -0500 (0:00:00.121) 0:01:35.065 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:37:33 -0500 (0:00:00.072) 0:01:35.137 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:37:33 -0500 (0:00:00.071) 0:01:35.209 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:37:33 -0500 (0:00:00.078) 0:01:35.287 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:37:33 -0500 (0:00:00.428) 0:01:35.716 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004103", "end": "2025-01-25 11:37:34.217841", "rc": 0, "start": "2025-01-25 11:37:34.213738" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:37:34 -0500 (0:00:00.460) 0:01:36.176 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.007030", "end": "2025-01-25 
11:37:34.678985", "rc": 0, "start": "2025-01-25 11:37:34.671955" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:37:34 -0500 (0:00:00.071) 0:01:36.620 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:37:34 -0500 (0:00:00.071) 0:01:36.692 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:37:34 -0500 (0:00:00.094) 0:01:36.786 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:37:34 -0500 (0:00:00.037) 0:01:36.824 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.037) 0:01:36.861 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.036) 0:01:36.898 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.040) 0:01:36.938 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false }
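The getsubids output above has the form "0: <user> <start> <range>", and the role folds it into the podman_subuid_info / podman_subgid_info facts shown. A minimal sketch of that derivation, assuming registered command results named __subuid_out and __subgid_out (illustrative names; the role's actual variable names and parsing may differ):

    - name: Set user subuid and subgid info (sketch)
      ansible.builtin.set_fact:
        # "0: user_quadlet_pod 720896 65536" -> whitespace fields 2 and 3
        podman_subuid_info:
          user_quadlet_pod:
            start: "{{ __subuid_out.stdout.split()[2] | int }}"
            range: "{{ __subuid_out.stdout.split()[3] | int }}"
        podman_subgid_info:
          user_quadlet_pod:
            start: "{{ __subgid_out.stdout.split()[2] | int }}"
            range: "{{ __subgid_out.stdout.split()[3] | int }}"

TASK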
[fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.050) 0:01:36.988 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.109) 0:01:37.098 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.060) 0:01:37.158 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.053) 0:01:37.212 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.077) 0:01:37.290 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.045) 0:01:37.335 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.041) 0:01:37.376 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.068) 0:01:37.444 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.040) 0:01:37.485 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.036) 0:01:37.521 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.096) 0:01:37.618 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.068) 0:01:37.687 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.065) 0:01:37.752 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 25 January 2025 11:37:35 -0500 (0:00:00.058) 0:01:37.811 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.061) 0:01:37.872 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.047) 0:01:37.920 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.043) 0:01:37.964 
****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.043) 0:01:38.008 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.036) 0:01:38.044 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.036) 0:01:38.080 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.031) 0:01:38.112 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.030) 0:01:38.142 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.102) 0:01:38.245 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.068) 0:01:38.313 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": 
"", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.054) 0:01:38.368 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.038) 0:01:38.406 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.069) 0:01:38.476 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.071) 0:01:38.547 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.040) 0:01:38.588 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.041) 0:01:38.629 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:37:36 -0500 (0:00:00.049) 0:01:38.678 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, 
"nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:37:37 -0500 (0:00:00.407) 0:01:39.086 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003980", "end": "2025-01-25 11:37:37.565224", "rc": 0, "start": "2025-01-25 11:37:37.561244" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:37:37 -0500 (0:00:00.412) 0:01:39.498 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006025", "end": "2025-01-25 11:37:37.973197", "rc": 0, "start": "2025-01-25 11:37:37.967172" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.500) 0:01:39.998 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.087) 0:01:40.086 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.060) 0:01:40.146 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.042) 0:01:40.189 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.045) 0:01:40.235 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.044) 0:01:40.279 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.041) 0:01:40.320 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.044) 0:01:40.382 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.035) 0:01:40.426 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.085) 0:01:40.462 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.065) 0:01:40.547 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.108) 0:01:40.613 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3
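The __podman_quadlet_file being cleaned up here is the unit rendered from the Container spec shown in "Set per-container variables part 0" above. For reference, a sketch of what writing that file by hand would look like; the role generates it internally, so the copy task, owner, and mode below are illustrative assumptions, while the destination path, section names, and key/value pairs all come from the log:

    - name: Write quadlet-pod-container.container (illustrative sketch)
      ansible.builtin.copy:
        dest: /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container
        content: |
          [Container]
          ContainerName=quadlet-pod-container
          Exec=/bin/busybox-extras httpd -f -p 80
          Image=quay.io/libpod/testimage:20210610
          Pod=quadlet-pod-pod.pod

          [Install]
          WantedBy=default.target
        owner: user_quadlet_pod   # illustrative; the role handles ownership itself
        mode: "0644"              # illustrative

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: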
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 25 January 2025 11:37:38 -0500 (0:00:00.108) 0:01:40.722 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 25 January 2025 11:37:39 -0500 (0:00:00.425) 0:01:41.147 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 25 January 2025 11:37:39 -0500 (0:00:00.044) 0:01:41.192 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 25 January 2025 11:37:39 -0500 (0:00:00.418) 0:01:41.611 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 25 January 2025 11:37:39 -0500 (0:00:00.042) 0:01:41.653 ****** ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 25 January 2025 11:37:41 -0500 (0:00:01.771) 0:01:43.425 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 25 January 2025 11:37:41 -0500 (0:00:00.038) 0:01:43.463 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 25 January 2025 11:37:41 -0500 (0:00:00.043) 0:01:43.507 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 25 January 2025 11:37:41 -0500 (0:00:00.049) 0:01:43.556 ****** ok: [managed-node3] => { 
"ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 25 January 2025 11:37:41 -0500 (0:00:00.037) 0:01:43.593 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 25 January 2025 11:37:41 -0500 (0:00:00.038) 0:01:43.632 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 25 January 2025 11:37:41 -0500 (0:00:00.066) 0:01:43.698 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 25 January 2025 11:37:41 -0500 (0:00:00.040) 0:01:43.739 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 25 January 2025 11:37:41 -0500 (0:00:00.039) 0:01:43.779 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 25 January 2025 11:37:41 -0500 (0:00:00.053) 0:01:43.832 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.040) 0:01:43.872 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.038) 0:01:43.911 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman 
: For testing and debugging - networks] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.038) 0:01:43.949 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.042) 0:01:43.991 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.039) 0:01:44.031 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.039) 0:01:44.070 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.039) 0:01:44.109 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.038) 0:01:44.148 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.046) 0:01:44.195 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.044) 0:01:44.239 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.035) 0:01:44.275 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.055) 0:01:44.331 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.067) 0:01:44.398 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.042) 0:01:44.440 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.097) 0:01:44.538 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:37:42 -0500 (0:00:00.051) 0:01:44.589 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:37:43 -0500 (0:00:00.413) 0:01:45.002 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004089", "end": "2025-01-25 11:37:43.478053", "rc": 0, "start": "2025-01-25 11:37:43.473964" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:37:43 -0500 (0:00:00.409) 0:01:45.412 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.007186", "end": "2025-01-25 11:37:43.891687", "rc": 0, "start": "2025-01-25 11:37:43.884501" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:37:43 -0500 (0:00:00.427) 0:01:45.840 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.065) 0:01:45.906 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.060) 0:01:45.966 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.065) 0:01:46.032 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.070) 0:01:46.103 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.064) 0:01:46.167 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.064) 0:01:46.232 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.101) 0:01:46.334 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.067) 0:01:46.401 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.058) 0:01:46.460 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.130) 0:01:46.591 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.076) 0:01:46.667 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 25 January 2025 11:37:44 -0500 (0:00:00.146) 0:01:46.814 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 25 January 2025 11:37:45 -0500 (0:00:00.457) 0:01:47.271 ****** skipping: 
[managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 25 January 2025 11:37:45 -0500 (0:00:00.069) 0:01:47.341 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 25 January 2025 11:37:45 -0500 (0:00:00.461) 0:01:47.802 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.066) 0:01:47.869 ****** ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.454) 0:01:48.323 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.040) 0:01:48.364 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.042) 0:01:48.406 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.046) 0:01:48.453 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.037) 0:01:48.490 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Manage linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.037) 0:01:48.528 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.065) 0:01:48.593 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.038) 0:01:48.632 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.037) 0:01:48.669 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.045) 0:01:48.714 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.038) 0:01:48.752 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.037) 0:01:48.790 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 25 January 2025 11:37:46 -0500 (0:00:00.038) 0:01:48.829 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.042) 0:01:48.871 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.045) 0:01:48.916 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.039) 0:01:48.956 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.038) 0:01:48.994 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.037) 0:01:49.031 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node3 => (item=user_quadlet_pod) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.091) 0:01:49.122 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.044) 0:01:49.167 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.097) 0:01:49.265 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.408) 0:01:49.674 ****** 
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.037) 0:01:49.711 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.036) 0:01:49.747 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.039) 0:01:49.787 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.035) 0:01:49.822 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Saturday 25 January 2025 11:37:47 -0500 (0:00:00.034) 0:01:49.857 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Saturday 25 January 2025 11:37:48 -0500 (0:00:00.034) 0:01:49.892 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Saturday 25 January 2025 11:37:48 -0500 (0:00:00.046) 0:01:49.938 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 25 January 2025 11:37:48 -0500 (0:00:00.036) 0:01:49.975 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK 
[fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 25 January 2025 11:37:48 -0500 (0:00:00.032) 0:01:50.007 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Remove test user] ******************************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:168 Saturday 25 January 2025 11:37:48 -0500 (0:00:00.055) 0:01:50.063 ****** changed: [managed-node3] => { "changed": true, "force": false, "name": "user_quadlet_pod", "remove": false, "state": "absent" } TASK [Cleanup system - root] *************************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:173 Saturday 25 January 2025 11:37:48 -0500 (0:00:00.541) 0:01:50.604 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 25 January 2025 11:37:48 -0500 (0:00:00.110) 0:01:50.715 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 25 January 2025 11:37:48 -0500 (0:00:00.095) 0:01:50.810 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 25 January 2025 11:37:48 -0500 (0:00:00.049) 0:01:50.859 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 25 January 2025 11:37:49 -0500 (0:00:00.045) 0:01:50.905 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 25 January 2025 11:37:49 -0500 (0:00:00.052) 0:01:50.957 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 25 January 2025 11:37:49 -0500 (0:00:00.041) 0:01:50.999 
****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 25 January 2025 11:37:49 -0500 (0:00:00.037) 0:01:51.036 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 25 January 2025 11:37:49 -0500 (0:00:00.080) 0:01:51.117 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.984) 0:01:52.101 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.038) 0:01:52.140 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.040) 0:01:52.181 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.035) 0:01:52.217 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.034) 0:01:52.251 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.040) 0:01:52.291 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.031040", "end": "2025-01-25 11:37:50.801565", "rc": 0, "start": "2025-01-25 11:37:50.770525" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.445) 0:01:52.737 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.039) 0:01:52.777 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.034) 0:01:52.811 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 25 January 2025 11:37:50 -0500 (0:00:00.045) 0:01:52.856 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 25 January 2025 11:37:51 -0500 (0:00:00.045) 0:01:52.902 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 25 January 2025 11:37:51 -0500 (0:00:00.058) 0:01:52.961 ****** META: end_host conditional 
evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 25 January 2025 11:37:51 -0500 (0:00:00.070) 0:01:53.031 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:37:51 -0500 (0:00:00.070) 0:01:53.102 ****** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:37:51 -0500 (0:00:00.479) 0:01:53.582 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:37:51 -0500 (0:00:00.043) 0:01:53.625 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:37:51 -0500 (0:00:00.066) 0:01:53.692 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.442) 0:01:54.135 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.042) 0:01:54.177 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.037) 0:01:54.215 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.036) 0:01:54.252 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.036) 0:01:54.288 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.041) 0:01:54.330 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.037) 0:01:54.367 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.036) 0:01:54.403 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.036) 0:01:54.440 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", 
"__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.066) 0:01:54.506 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.119) 0:01:54.626 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.064) 0:01:54.690 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 25 January 2025 11:37:52 -0500 (0:00:00.065) 0:01:54.756 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.118) 0:01:54.874 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.042) 0:01:54.917 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.044) 0:01:54.962 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.086) 0:01:55.048 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] 
*********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.039) 0:01:55.087 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.040) 0:01:55.128 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.070) 0:01:55.198 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.038) 0:01:55.237 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.044) 0:01:55.281 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.059) 0:01:55.340 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.055) 0:01:55.396 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.040) 0:01:55.436 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 25 January 2025 11:37:53 -0500 
(0:00:00.048) 0:01:55.485 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.043) 0:01:55.529 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.038) 0:01:55.568 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.032) 0:01:55.600 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.036) 0:01:55.636 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.032) 0:01:55.668 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.099) 0:01:55.767 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:37:53 -0500 (0:00:00.063) 0:01:55.831 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", 
"__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.072) 0:01:55.904 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.041) 0:01:55.946 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.059) 0:01:56.005 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.144) 0:01:56.150 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.042) 0:01:56.193 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.040) 0:01:56.233 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.054) 0:01:56.288 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", 
"mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.416) 0:01:56.704 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.037) 0:01:56.741 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.041) 0:01:56.783 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:37:54 -0500 (0:00:00.057) 0:01:56.840 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.061) 0:01:56.901 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.043) 0:01:56.945 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.044) 0:01:56.990 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 
Saturday 25 January 2025 11:37:55 -0500 (0:00:00.056) 0:01:57.047 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.044) 0:01:57.092 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.061) 0:01:57.153 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.038) 0:01:57.192 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.039) 0:01:57.231 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.084) 0:01:57.316 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.067) 0:01:57.384 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.108) 0:01:57.492 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] 
************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 25 January 2025 11:37:55 -0500 (0:00:00.044) 0:01:57.536 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-25 11:36:17 EST", "ActiveEnterTimestampMonotonic": "673374917", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "systemd-journald.socket quadlet-pod-pod-pod.service -.mount sysinit.target network-online.target basic.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-25 11:36:17 EST", "AssertTimestampMonotonic": "673242333", "Before": "multi-user.target shutdown.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "99997000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-25 11:36:17 EST", "ConditionTimestampMonotonic": "673242329", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-pod-container.service", "ControlGroupId": "12087", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3893915648", "EffectiveMemoryMax": "3893915648", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "4417", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "66629", "ExecMainStartTimestamp": "Sat 2025-01-25 11:36:17 EST", "ExecMainStartTimestampMonotonic": "673374825", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split 
--sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Sat 2025-01-25 11:36:17 EST] ; stop_time=[n/a] ; pid=66618 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Sat 2025-01-25 11:36:17 EST] ; stop_time=[n/a] ; pid=66618 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-25 11:36:17 EST", "InactiveExitTimestampMonotonic": "673247020", "InvocationID": "a950b2f52bd94aea8bd8cb58e31eecc7", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14725", "LimitNPROCSoft": "14725", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", 
"LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14725", "LimitSIGPENDINGSoft": "14725", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "66629", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3464675328", "MemoryCurrent": "888832", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "18063360", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": 
"infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-25 11:36:17 EST", "StateChangeTimestampMonotonic": "673374917", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "4417", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "quadlet-pod-pod-pod.service multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 25 January 2025 11:38:06 -0500 (0:00:11.317) 0:02:08.854 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822977.95436, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1737822975.6013656, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 393222, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737822975.2913663, "nlink": 1, "path": "/etc/containers/systemd/quadlet-pod-container.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 0, "version": "2377285102", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 25 January 2025 11:38:07 -0500 (0:00:00.421) 0:02:09.276 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 25 January 2025 11:38:07 -0500 (0:00:00.069) 0:02:09.345 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 25 January 2025 11:38:07 -0500 (0:00:00.400) 0:02:09.745 ****** ok: [managed-node3] => { "censored": 
"the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 25 January 2025 11:38:07 -0500 (0:00:00.056) 0:02:09.801 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 25 January 2025 11:38:07 -0500 (0:00:00.039) 0:02:09.841 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 25 January 2025 11:38:08 -0500 (0:00:00.037) 0:02:09.879 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 25 January 2025 11:38:08 -0500 (0:00:00.402) 0:02:10.281 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 25 January 2025 11:38:09 -0500 (0:00:00.816) 0:02:11.097 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 25 January 2025 11:38:09 -0500 (0:00:00.453) 0:02:11.551 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 25 January 2025 11:38:09 -0500 (0:00:00.050) 0:02:11.602 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 25 January 2025 11:38:09 -0500 (0:00:00.038) 0:02:11.640 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.067821", "end": "2025-01-25 11:38:10.172859", "rc": 0, "start": "2025-01-25 11:38:10.105038" } STDOUT: 69a9cbc691bf85d3aa55bb0bf8ba6a329c72d669abf5fe01fa5d83f56a966aaf 
9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 25 January 2025 11:38:10 -0500 (0:00:00.467) 0:02:12.108 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 25 January 2025 11:38:10 -0500 (0:00:00.069) 0:02:12.177 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 25 January 2025 11:38:10 -0500 (0:00:00.036) 0:02:12.214 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 25 January 2025 11:38:10 -0500 (0:00:00.035) 0:02:12.249 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 25 January 2025 11:38:10 -0500 (0:00:00.043) 0:02:12.293 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.033990", "end": "2025-01-25 11:38:10.802096", "rc": 0, "start": "2025-01-25 11:38:10.768106" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 25 January 2025 11:38:10 -0500 (0:00:00.469) 0:02:12.763 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.033723", "end": "2025-01-25 11:38:11.297226", "rc": 0, "start": "2025-01-25 11:38:11.263503" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 25 January 2025 11:38:11 -0500 (0:00:00.469) 0:02:13.233 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.036656", "end": "2025-01-25 11:38:11.749586", "rc": 0, "start": "2025-01-25 11:38:11.712930" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 25 January 2025 11:38:11 -0500 (0:00:00.476) 0:02:13.710 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], 
"delta": "0:00:00.034138", "end": "2025-01-25 11:38:12.243755", "rc": 0, "start": "2025-01-25 11:38:12.209617" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 25 January 2025 11:38:12 -0500 (0:00:00.562) 0:02:14.272 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 25 January 2025 11:38:12 -0500 (0:00:00.452) 0:02:14.725 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 25 January 2025 11:38:13 -0500 (0:00:00.457) 0:02:15.183 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": 
"stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf5-makecache.service": { "name": "dnf5-makecache.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction-cleanup.service": { "name": "dnf5-offline-transaction-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction.service": { "name": "dnf5-offline-transaction.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "inactive", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": 
"inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-bsod.service": { "name": "systemd-bsod.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed-firstboot.service": { "name": "systemd-homed-firstboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-mountfsd.service": { "name": "systemd-mountfsd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-persistent-storage.service": { "name": "systemd-networkd-persistent-storage.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", 
"state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-nsresourced.service": { "name": "systemd-nsresourced.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-storagetm.service": { "name": "systemd-storagetm.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:38:16 -0500 (0:00:02.875) 0:02:18.058 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 25 January 2025 11:38:16 -0500 (0:00:00.062) 0:02:18.121 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 25 January 2025 11:38:16 -0500 (0:00:00.063) 0:02:18.185 ****** ok: [managed-node3] => { "ansible_facts": { 
"__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 25 January 2025 11:38:16 -0500 (0:00:00.052) 0:02:18.237 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 25 January 2025 11:38:16 -0500 (0:00:00.051) 0:02:18.288 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 25 January 2025 11:38:16 -0500 (0:00:00.058) 0:02:18.346 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 25 January 2025 11:38:16 -0500 (0:00:00.073) 0:02:18.420 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 25 January 2025 11:38:16 -0500 (0:00:00.046) 0:02:18.467 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 25 January 2025 11:38:16 -0500 (0:00:00.042) 0:02:18.509 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 25 January 2025 11:38:16 -0500 (0:00:00.056) 0:02:18.566 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822652.7720828, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1737822613.3291836, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192282, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": 
false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "882212291", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.433) 0:02:19.000 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.043) 0:02:19.043 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.038) 0:02:19.082 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.037) 0:02:19.120 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.036) 0:02:19.157 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.041) 0:02:19.199 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.042) 0:02:19.241 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: 
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.057) 0:02:19.299 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.058) 0:02:19.358 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.084) 0:02:19.442 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.049) 0:02:19.491 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.039) 0:02:19.531 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.079) 0:02:19.611 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.048) 0:02:19.659 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.082) 0:02:19.741 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Stop and disable service] ************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 25 January 2025 11:38:17 -0500 (0:00:00.039) 0:02:19.781 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-pod-pod.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target systemd-journald.socket system.slice basic.target network-online.target -.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893915648", "EffectiveMemoryMax": "3893915648", "EffectiveTasksMax": "4417", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name 
quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14725", "LimitNPROCSoft": "14725", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14725", "LimitSIGPENDINGSoft": "14725", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", 
"LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3481223168", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", 
"SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4417", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 25 January 2025 11:38:18 -0500 (0:00:00.876) 0:02:20.658 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737822978.3113592, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "ctime": 1737822968.0953836, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 393221, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737822967.7323844, "nlink": 1, "path": "/etc/containers/systemd/quadlet-pod-pod.pod", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 70, "uid": 0, "version": "2190219053", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 25 January 2025 11:38:19 -0500 (0:00:00.420) 0:02:21.079 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 25 January 2025 11:38:19 -0500 (0:00:00.063) 0:02:21.142 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 25 January 2025 11:38:19 -0500 (0:00:00.404) 0:02:21.547 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 25 January 2025 
11:38:19 -0500 (0:00:00.056) 0:02:21.603 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 25 January 2025 11:38:19 -0500 (0:00:00.041) 0:02:21.644 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 25 January 2025 11:38:19 -0500 (0:00:00.051) 0:02:21.696 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 25 January 2025 11:38:20 -0500 (0:00:00.419) 0:02:22.115 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 25 January 2025 11:38:21 -0500 (0:00:00.806) 0:02:22.921 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 25 January 2025 11:38:21 -0500 (0:00:00.461) 0:02:23.383 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 25 January 2025 11:38:21 -0500 (0:00:00.055) 0:02:23.438 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 25 January 2025 11:38:21 -0500 (0:00:00.096) 0:02:23.534 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.036707", "end": "2025-01-25 11:38:22.043989", "rc": 0, "start": "2025-01-25 11:38:22.007282" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 25 January 2025 11:38:22 -0500 (0:00:00.446) 0:02:23.981 ****** included: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] 
************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 25 January 2025 11:38:22 -0500 (0:00:00.066) 0:02:24.048 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 25 January 2025 11:38:22 -0500 (0:00:00.040) 0:02:24.089 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 25 January 2025 11:38:22 -0500 (0:00:00.038) 0:02:24.127 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 25 January 2025 11:38:22 -0500 (0:00:00.035) 0:02:24.163 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.034991", "end": "2025-01-25 11:38:22.667720", "rc": 0, "start": "2025-01-25 11:38:22.632729" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 25 January 2025 11:38:22 -0500 (0:00:00.462) 0:02:24.625 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.035252", "end": "2025-01-25 11:38:23.163458", "rc": 0, "start": "2025-01-25 11:38:23.128206" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 25 January 2025 11:38:23 -0500 (0:00:00.503) 0:02:25.129 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.033986", "end": "2025-01-25 11:38:23.638492", "rc": 0, "start": "2025-01-25 11:38:23.604506" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 25 January 2025 11:38:23 -0500 (0:00:00.444) 0:02:25.574 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.034826", "end": "2025-01-25 11:38:24.078661", "rc": 0, "start": "2025-01-25 11:38:24.043835" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 25 January 2025 11:38:24 -0500 (0:00:00.448) 0:02:26.023 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", 
"changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 25 January 2025 11:38:24 -0500 (0:00:00.437) 0:02:26.460 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 25 January 2025 11:38:25 -0500 (0:00:00.445) 0:02:26.906 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, 
"dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf5-makecache.service": { "name": "dnf5-makecache.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction-cleanup.service": { "name": "dnf5-offline-transaction-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction.service": { "name": "dnf5-offline-transaction.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": 
"pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": 
"stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-bsod.service": { "name": "systemd-bsod.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed-firstboot.service": { "name": "systemd-homed-firstboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-mountfsd.service": { "name": "systemd-mountfsd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-persistent-storage.service": { "name": "systemd-networkd-persistent-storage.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-nsresourced.service": { "name": "systemd-nsresourced.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-storagetm.service": { "name": "systemd-storagetm.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 25 January 2025 11:38:27 -0500 (0:00:02.802) 0:02:29.708 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 25 January 2025 11:38:27 -0500 (0:00:00.034) 0:02:29.743 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 25 January 2025 11:38:27 -0500 (0:00:00.031) 0:02:29.774 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 25 January 2025 11:38:27 -0500 (0:00:00.036) 0:02:29.810 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:182 Saturday 25 January 2025 11:38:28 -0500 (0:00:00.051) 0:02:29.861 ****** fatal: [managed-node3]: FAILED! 
=> { "assertion": "__podman_test_debug_secrets.stdout == \"\"", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Dump journal] ************************************************************ task path: /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:194 Saturday 25 January 2025 11:38:28 -0500 (0:00:00.046) 0:02:29.908 ****** fatal: [managed-node3]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.024263", "end": "2025-01-25 11:38:28.392868", "failed_when_result": true, "rc": 0, "start": "2025-01-25 11:38:28.368605" } STDOUT: Jan 25 11:38:07 managed-node3 audit[86207]: CRYPTO_KEY_USER pid=86207 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86207 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:07 managed-node3 audit[86231]: CRYPTO_KEY_USER pid=86231 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86231 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:07 managed-node3 audit[86257]: CRYPTO_KEY_USER pid=86257 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86257 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:07 managed-node3 audit[86284]: CRYPTO_KEY_USER pid=86284 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86284 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:07 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[86309]: CRYPTO_KEY_USER pid=86309 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86309 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[86333]: CRYPTO_KEY_USER pid=86333 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86333 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[86363]: CRYPTO_KEY_USER pid=86363 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86363 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[86387]: CRYPTO_KEY_USER pid=86387 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86387 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:08 managed-node3 audit[86412]: CRYPTO_KEY_USER pid=86412 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86412 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:08 managed-node3 python3[86438]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-pod-container.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[86439]: CRYPTO_KEY_USER pid=86439 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86439 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[86464]: CRYPTO_KEY_USER pid=86464 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86464 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[86488]: CRYPTO_KEY_USER pid=86488 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86488 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[86518]: CRYPTO_KEY_USER pid=86518 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86518 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[86542]: CRYPTO_KEY_USER pid=86542 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86542 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:08 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:08 managed-node3 audit[86567]: CRYPTO_KEY_USER pid=86567 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86567 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:08 managed-node3 python3[86593]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 25 11:38:08 managed-node3 systemd[1]: Reload requested from client PID 86594 ('systemctl') (unit session-10.scope)... Jan 25 11:38:08 managed-node3 systemd[1]: Reloading... Jan 25 11:38:09 managed-node3 systemd[1]: Reloading finished in 235 ms. Jan 25 11:38:09 managed-node3 audit: BPF prog-id=250 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=229 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=251 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=252 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=230 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=231 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=253 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=235 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=254 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=255 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=236 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=237 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=256 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=245 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=257 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=258 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=246 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=247 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=259 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=249 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=260 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=248 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=261 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=238 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=262 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=242 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=263 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=264 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=243 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=244 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=265 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=266 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=239 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=240 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=267 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=232 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=268 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=269 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=233 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=234 op=UNLOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=270 op=LOAD Jan 25 11:38:09 managed-node3 audit: BPF prog-id=241 op=UNLOAD Jan 25 11:38:09 managed-node3 
audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[86647]: CRYPTO_KEY_USER pid=86647 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86647 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[86673]: CRYPTO_KEY_USER pid=86673 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86673 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[86697]: CRYPTO_KEY_USER pid=86697 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86697 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[86727]: CRYPTO_KEY_USER pid=86727 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86727 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[86751]: CRYPTO_KEY_USER pid=86751 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86751 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:09 managed-node3 audit[86776]: CRYPTO_KEY_USER pid=86776 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86776 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:09 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:09 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:09 managed-node3 audit[86810]: CRYPTO_KEY_USER pid=86810 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=86810 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:09 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success'
[... repeated sshd-session audit records (USER_LOGIN / USER_START / USER_END / USER_LOGOUT and CRYPTO_KEY_USER key destruction) for the Ansible control connection, ses=10 addr=10.31.10.205, omitted ...]
Jan 25 11:38:10 managed-node3 python3[86964]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 25 11:38:10 managed-node3 podman[86965]: 2025-01-25 11:38:10.149728061 -0500 EST m=+0.034780168 image untag 69a9cbc691bf85d3aa55bb0bf8ba6a329c72d669abf5fe01fa5d83f56a966aaf localhost/podman-pause:5.3.2-1737504000
Jan 25 11:38:10 managed-node3 podman[86965]: 2025-01-25 11:38:10.133535583 -0500 EST m=+0.018587781 image remove 69a9cbc691bf85d3aa55bb0bf8ba6a329c72d669abf5fe01fa5d83f56a966aaf
Jan 25 11:38:10 managed-node3 podman[86965]: 2025-01-25 11:38:10.167781347 -0500 EST m=+0.052833462 image untag 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610
Jan 25 11:38:10 managed-node3 podman[86965]: 2025-01-25 11:38:10.149735356 -0500 EST m=+0.034787420 image remove 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f
Jan 25 11:38:10 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
[... repeated sshd-session audit records omitted ...]
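For readers reconstructing this step: the `ansible-ansible.legacy.command` entry above is the journal's view of a bare command task running `podman image prune --all -f`. A minimal sketch of such a task follows; the task name and the `changed_when` logic are illustrative assumptions, not copied from the test suite.

```yaml
# Sketch of the cleanup step recorded above; the journal shows it driven
# through ansible.legacy.command, i.e. a bare command task.
- name: Remove all container images (hypothetical reproduction)
  ansible.builtin.command: podman image prune --all -f
  register: __prune
  changed_when: __prune.stdout | length > 0  # prune prints the IDs it removed
```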
[... repeated sshd-session audit records omitted ...]
Jan 25 11:38:10 managed-node3 python3[87126]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 25 11:38:10 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [... journal catalog text as above ...]
[... repeated sshd-session audit records omitted ...]
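The `podman images -n` invocation above is a post-cleanup verification; `-n` suppresses the header row, so empty output means no images remain. A hedged sketch of such a check (the register variable and `failed_when` condition are assumptions):

```yaml
# Hypothetical post-cleanup check: with the header suppressed (-n),
# empty stdout means no images are left.
- name: List remaining images
  ansible.builtin.command: podman images -n
  register: __images
  changed_when: false
  failed_when: __images.stdout | length > 0
```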
[... repeated sshd-session audit records omitted ...]
Jan 25 11:38:11 managed-node3 python3[87288]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 25 11:38:11 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [... journal catalog text as above ...]
[... repeated sshd-session audit records omitted ...]
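The `podman volume ls -n` call plays the same role for volumes. A sketch using core modules only; the assertion is an illustrative assumption, not the suite's actual check:

```yaml
# Hypothetical volume check mirroring the `podman volume ls -n` call above.
- name: List remaining volumes
  ansible.builtin.command: podman volume ls -n
  register: __volumes
  changed_when: false

- name: Assert no volumes were left behind
  ansible.builtin.assert:
    that:
      - __volumes.stdout == ""
```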
[... repeated sshd-session audit records omitted ...]
Jan 25 11:38:11 managed-node3 python3[87450]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 25 11:38:11 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [... journal catalog text as above ...]
[... repeated sshd-session audit records omitted ...]
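`podman ps --noheading` verifies that no containers survived the teardown. Since containers can take a moment to exit after their units stop, a polling variant is sometimes useful; the retry and delay values below are arbitrary assumptions:

```yaml
# Hypothetical polling variant: retry until `podman ps --noheading`
# reports no containers (retry/delay values are arbitrary).
- name: Wait for all containers to stop
  ansible.builtin.command: podman ps --noheading
  register: __ps
  changed_when: false
  until: __ps.stdout == ""
  retries: 6
  delay: 5
```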
[... repeated sshd-session audit records omitted ...]
Jan 25 11:38:12 managed-node3 python3[87614]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 25 11:38:12 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [... journal catalog text as above ...]
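`podman network ls -n -q` lists network names without headers. Podman keeps a default network even on a clean system, so asserting empty output would be wrong here; the sketch below tolerates it, assuming the upstream default name `podman`:

```yaml
# Hypothetical network check: everything except the default `podman`
# network (an assumed name) should be gone after cleanup.
- name: List networks, names only
  ansible.builtin.command: podman network ls -n -q
  register: __nets
  changed_when: false

- name: Assert only the default network remains
  ansible.builtin.assert:
    that:
      - __nets.stdout_lines | difference(['podman']) | length == 0
```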
Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87622]: CRYPTO_KEY_USER pid=87622 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87622 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87647]: CRYPTO_KEY_USER pid=87647 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87647 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87671]: CRYPTO_KEY_USER pid=87671 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87671 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87701]: CRYPTO_KEY_USER pid=87701 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87701 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87725]: CRYPTO_KEY_USER pid=87725 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87725 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:12 managed-node3 audit[87750]: CRYPTO_KEY_USER pid=87750 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87750 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:12 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87784]: CRYPTO_KEY_USER pid=87784 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87784 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87809]: CRYPTO_KEY_USER pid=87809 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87809 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87833]: CRYPTO_KEY_USER pid=87833 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87833 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87863]: CRYPTO_KEY_USER pid=87863 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87863 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:12 managed-node3 audit[87887]: CRYPTO_KEY_USER pid=87887 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87887 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:13 managed-node3 audit[87912]: CRYPTO_KEY_USER pid=87912 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87912 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:13 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:13 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[87945]: CRYPTO_KEY_USER pid=87945 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87945 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[87970]: CRYPTO_KEY_USER pid=87970 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87970 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[87994]: CRYPTO_KEY_USER pid=87994 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=87994 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[88024]: CRYPTO_KEY_USER pid=88024 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88024 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[88048]: CRYPTO_KEY_USER pid=88048 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88048 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:13 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:13 managed-node3 audit[88073]: CRYPTO_KEY_USER pid=88073 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88073 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:13 managed-node3 python3[88099]: ansible-service_facts Invoked Jan 25 11:38:14 managed-node3 systemd[1]: /usr/lib/systemd/system/lvm-devices-import.service:8: Unknown key 'ConditionPathExists' in section [Service], ignoring. Jan 25 11:38:16 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[88237]: CRYPTO_KEY_USER pid=88237 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88237 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
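The `Unknown key 'ConditionPathExists' in section [Service], ignoring` message above (logged while ansible-service_facts scans the units) is a unit-file bug rather than a test failure: ConditionPathExists is a [Unit]-section directive, so systemd ignores it when it appears under [Service]. A drop-in declaring the condition in the correct section might look like the following sketch; the tested path is not shown in this log, so the one below is an assumption:

- name: Declare ConditionPathExists in [Unit] via a drop-in (sketch)
  ansible.builtin.copy:
    dest: /etc/systemd/system/lvm-devices-import.service.d/override.conf
    content: |
      [Unit]
      # ConditionPathExists is only honored in [Unit]; in [Service] it is ignored.
      # The path below is an assumption; check the shipped unit for the real condition.
      ConditionPathExists=!/etc/lvm/devices/system.devices
    mode: "0644"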
res=success'
Jan 25 11:38:16 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success'
Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success'
Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success'
Jan 25 11:38:16 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success'
Jan 25 11:38:16 managed-node3 audit[88262]: CRYPTO_KEY_USER pid=88262 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88262 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success'
Jan 25 11:38:16 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jan 25 11:38:16 managed-node3 audit[1]: SERVICE_STOP pid=1 uid=0 auid=4294967295 ses=4294967295 subj=system_u:system_r:init_t:s0 msg='unit=NetworkManager-dispatcher comm="systemd" exe="/usr/lib/systemd/systemd" hostname=? addr=? terminal=? res=success'
Jan 25 11:38:16 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success'
Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success'
Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success'
Jan 25 11:38:16 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success'
Jan 25 11:38:16 managed-node3 audit[88287]: CRYPTO_KEY_USER pid=88287 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88287 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success'
Jan 25 11:38:16 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=?
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[88317]: CRYPTO_KEY_USER pid=88317 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88317 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[88341]: CRYPTO_KEY_USER pid=88341 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88341 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:16 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:16 managed-node3 audit[88366]: CRYPTO_KEY_USER pid=88366 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88366 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:17 managed-node3 python3[88392]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 25 11:38:17 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[88395]: CRYPTO_KEY_USER pid=88395 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88395 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[88420]: CRYPTO_KEY_USER pid=88420 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88420 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
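The ansible-stat call above probes for /usr/bin/getsubids, which the role uses to validate subuid/subgid configuration for rootless Podman. Written out as a task, the logged arguments (everything beyond path is the module's defaults) reduce to this sketch; the register name is illustrative, not the role's own:

- name: Check if the getsubids helper exists
  ansible.builtin.stat:
    path: /usr/bin/getsubids
  register: __podman_stat_getsubids  # hypothetical variable name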
res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:17 managed-node3 audit[88444]: CRYPTO_KEY_USER pid=88444 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88444 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[88474]: CRYPTO_KEY_USER pid=88474 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88474 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[88498]: CRYPTO_KEY_USER pid=88498 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88498 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:18 managed-node3 audit[88523]: CRYPTO_KEY_USER pid=88523 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88523 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:18 managed-node3 python3[88549]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 25 11:38:18 managed-node3 systemd[1]: Reload requested from client PID 88552 ('systemctl') (unit session-10.scope)... Jan 25 11:38:18 managed-node3 systemd[1]: Reloading... Jan 25 11:38:18 managed-node3 systemd[1]: Reloading finished in 231 ms. 
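The ansible-systemd invocation and the daemon reload above are the cleanup pass stopping the generated pod unit: a quadlet file named quadlet-pod-pod.pod yields a service named quadlet-pod-pod-pod.service, because the quadlet generator appends -pod for .pod files. As a standalone task, the logged arguments correspond to roughly:

- name: Stop and disable the quadlet pod service
  ansible.builtin.systemd:
    name: quadlet-pod-pod-pod.service
    scope: system
    state: stopped
    enabled: false
    force: true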
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=271 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=262 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=272 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=273 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=263 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=264 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=274 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=261 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=275 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=253 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=276 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=277 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=254 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=255 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=278 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=260 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=279 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=280 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=265 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=266 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=281 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=256 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=282 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=283 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=257 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=258 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=284 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=270 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=285 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=267 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=286 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=287 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=268 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=269 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=288 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=250 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=289 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=290 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=251 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=252 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=291 op=LOAD
Jan 25 11:38:18 managed-node3 audit: BPF prog-id=259 op=UNLOAD
Jan 25 11:38:18 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success'
Jan 25 11:38:18 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=?
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[88605]: CRYPTO_KEY_USER pid=88605 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88605 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[88630]: CRYPTO_KEY_USER pid=88630 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88630 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[88654]: CRYPTO_KEY_USER pid=88654 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88654 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[88684]: CRYPTO_KEY_USER pid=88684 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88684 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[88708]: CRYPTO_KEY_USER pid=88708 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88708 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:18 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:18 managed-node3 audit[88733]: CRYPTO_KEY_USER pid=88733 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88733 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:19 managed-node3 python3[88759]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[88762]: CRYPTO_KEY_USER pid=88762 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88762 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[88787]: CRYPTO_KEY_USER pid=88787 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88787 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
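The stat above verifies that the quadlet source file /etc/containers/systemd/quadlet-pod-pod.pod is still present before removal. Its contents are not captured in this log; for orientation only, a minimal pod quadlet of that name could have been deployed with something like the following sketch (the [Pod] body is an assumption):

- name: Deploy a minimal pod quadlet (illustrative; real content not in this log)
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-pod-pod.pod
    content: |
      # Assumed minimal content for illustration.
      [Pod]
      PodName=quadlet-pod
    mode: "0644"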
res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[88811]: CRYPTO_KEY_USER pid=88811 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88811 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[88841]: CRYPTO_KEY_USER pid=88841 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88841 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[88865]: CRYPTO_KEY_USER pid=88865 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88865 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:19 managed-node3 audit[88890]: CRYPTO_KEY_USER pid=88890 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88890 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[88917]: CRYPTO_KEY_USER pid=88917 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88917 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[88942]: CRYPTO_KEY_USER pid=88942 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88942 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[88966]: CRYPTO_KEY_USER pid=88966 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88966 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[88996]: CRYPTO_KEY_USER pid=88996 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=88996 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[89020]: CRYPTO_KEY_USER pid=89020 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89020 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:19 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:20 managed-node3 audit[89045]: CRYPTO_KEY_USER pid=89045 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? 
spid=89045 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:20 managed-node3 python3[89071]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 25 11:38:20 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[89072]: CRYPTO_KEY_USER pid=89072 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89072 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[89097]: CRYPTO_KEY_USER pid=89097 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89097 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
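Everything in the long ansible-file argument dump above except path and state is a module default, so the removal step is simply:

- name: Remove the quadlet pod file
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-pod-pod.pod
    state: absent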
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[89121]: CRYPTO_KEY_USER pid=89121 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89121 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[89151]: CRYPTO_KEY_USER pid=89151 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89151 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[89175]: CRYPTO_KEY_USER pid=89175 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89175 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:20 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:20 managed-node3 audit[89200]: CRYPTO_KEY_USER pid=89200 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89200 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:20 managed-node3 python3[89226]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 25 11:38:20 managed-node3 systemd[1]: Reload requested from client PID 89227 ('systemctl') (unit session-10.scope)... Jan 25 11:38:20 managed-node3 systemd[1]: Reloading... Jan 25 11:38:20 managed-node3 systemd[1]: Reloading finished in 233 ms. Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89280]: CRYPTO_KEY_USER pid=89280 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89280 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89305]: CRYPTO_KEY_USER pid=89305 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89305 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89329]: CRYPTO_KEY_USER pid=89329 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89329 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89359]: CRYPTO_KEY_USER pid=89359 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89359 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89383]: CRYPTO_KEY_USER pid=89383 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89383 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:21 managed-node3 audit[89408]: CRYPTO_KEY_USER pid=89408 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89408 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:21 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:21 managed-node3 audit: BPF prog-id=292 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=275 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=293 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=294 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=276 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=277 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=295 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=278 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=296 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=288 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=297 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=298 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=289 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=290 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=299 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=284 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=300 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=291 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=301 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=271 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=302 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=303 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=272 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=273 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=304 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=274 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=305 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=285 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=306 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=307 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=286 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=287 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=308 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=281 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=309 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=310 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=282 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=283 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=311 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=312 op=LOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=279 op=UNLOAD Jan 25 11:38:21 managed-node3 audit: BPF prog-id=280 op=UNLOAD Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login 
id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89443]: CRYPTO_KEY_USER pid=89443 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89443 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89468]: CRYPTO_KEY_USER pid=89468 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89468 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89492]: CRYPTO_KEY_USER pid=89492 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89492 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89522]: CRYPTO_KEY_USER pid=89522 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89522 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[89546]: CRYPTO_KEY_USER pid=89546 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89546 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:21 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:21 managed-node3 audit[89571]: CRYPTO_KEY_USER pid=89571 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89571 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:22 managed-node3 python3[89597]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 25 11:38:22 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89606]: CRYPTO_KEY_USER pid=89606 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89606 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89631]: CRYPTO_KEY_USER pid=89631 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89631 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89655]: CRYPTO_KEY_USER pid=89655 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89655 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89685]: CRYPTO_KEY_USER pid=89685 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89685 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89709]: CRYPTO_KEY_USER pid=89709 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89709 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:22 managed-node3 audit[89734]: CRYPTO_KEY_USER pid=89734 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? 
spid=89734 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:22 managed-node3 python3[89760]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 25 11:38:22 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89768]: CRYPTO_KEY_USER pid=89768 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89768 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89793]: CRYPTO_KEY_USER pid=89793 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89793 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89817]: CRYPTO_KEY_USER pid=89817 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89817 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89847]: CRYPTO_KEY_USER pid=89847 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89847 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[89871]: CRYPTO_KEY_USER pid=89871 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89871 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:22 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:22 managed-node3 audit[89896]: CRYPTO_KEY_USER pid=89896 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89896 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:23 managed-node3 python3[89922]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 25 11:38:23 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[89930]: CRYPTO_KEY_USER pid=89930 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89930 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[89955]: CRYPTO_KEY_USER pid=89955 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89955 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[89979]: CRYPTO_KEY_USER pid=89979 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=89979 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[90009]: CRYPTO_KEY_USER pid=90009 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90009 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[90033]: CRYPTO_KEY_USER pid=90033 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90033 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:23 managed-node3 audit[90058]: CRYPTO_KEY_USER pid=90058 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90058 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:23 managed-node3 python3[90084]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:23 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[90093]: CRYPTO_KEY_USER pid=90093 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90093 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[90118]: CRYPTO_KEY_USER pid=90118 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90118 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[90142]: CRYPTO_KEY_USER pid=90142 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90142 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[90172]: CRYPTO_KEY_USER pid=90172 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90172 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[90196]: CRYPTO_KEY_USER pid=90196 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90196 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:23 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:23 managed-node3 audit[90221]: CRYPTO_KEY_USER pid=90221 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90221 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:24 managed-node3 python3[90247]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 25 11:38:24 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
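The journal excerpt above records the role's cleanup pass: the quadlet unit file /etc/containers/systemd/quadlet-pod-pod.pod is removed, systemd is reloaded so the generated unit disappears, all unused images are pruned, and the remaining images, volumes, containers, and networks are listed to confirm nothing is left behind. Below is a minimal sketch of equivalent Ansible tasks, reconstructed from the module invocations logged above; the task names are illustrative, and this is not the role's actual task file:

    - name: Remove the quadlet pod unit file
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-pod-pod.pod
        state: absent

    - name: Reload systemd to drop the generated unit
      ansible.builtin.systemd:
        daemon_reload: true
        scope: system

    - name: Prune all unused container images
      ansible.builtin.command: podman image prune --all -f

    - name: List leftover images, volumes, containers, and networks
      ansible.builtin.command: "{{ item }}"
      loop:
        - podman images -n
        - podman volume ls -n
        - podman ps --noheading
        - podman network ls -n -q
      changed_when: false

Each command in the sketch appears verbatim in an ansible-ansible.legacy.command invocation in the journal; the repeated USER_LOGIN/USER_START/USER_END/USER_LOGOUT and CRYPTO_KEY_USER audit records between them correspond to the SSH session setup and teardown the controller performs for each task.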
Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90255]: CRYPTO_KEY_USER pid=90255 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90255 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90280]: CRYPTO_KEY_USER pid=90280 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90280 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90304]: CRYPTO_KEY_USER pid=90304 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90304 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90334]: CRYPTO_KEY_USER pid=90334 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90334 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90358]: CRYPTO_KEY_USER pid=90358 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90358 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[90383]: CRYPTO_KEY_USER pid=90383 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90383 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:24 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90417]: CRYPTO_KEY_USER pid=90417 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90417 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90442]: CRYPTO_KEY_USER pid=90442 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90442 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90466]: CRYPTO_KEY_USER pid=90466 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90466 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90496]: CRYPTO_KEY_USER pid=90496 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90496 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90520]: CRYPTO_KEY_USER pid=90520 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90520 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[90545]: CRYPTO_KEY_USER pid=90545 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90545 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:24 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 25 11:38:24 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:24 managed-node3 audit[90579]: CRYPTO_KEY_USER pid=90579 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90579 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[90604]: CRYPTO_KEY_USER pid=90604 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90604 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[90628]: CRYPTO_KEY_USER pid=90628 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90628 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[90658]: CRYPTO_KEY_USER pid=90658 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90658 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[90682]: CRYPTO_KEY_USER pid=90682 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90682 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:25 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:25 managed-node3 audit[90707]: CRYPTO_KEY_USER pid=90707 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90707 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:25 managed-node3 python3[90733]: ansible-service_facts Invoked Jan 25 11:38:26 managed-node3 systemd[1]: /usr/lib/systemd/system/lvm-devices-import.service:8: Unknown key 'ConditionPathExists' in section [Service], ignoring. Jan 25 11:38:27 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:27 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Jan 25 11:38:27 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:27 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:27 managed-node3 audit[90871]: CRYPTO_KEY_USER pid=90871 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90871 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? 
res=success' Jan 25 11:38:27 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:27 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[90896]: CRYPTO_KEY_USER pid=90896 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90896 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[90920]: CRYPTO_KEY_USER pid=90920 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90920 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[90950]: CRYPTO_KEY_USER pid=90950 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90950 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[90974]: CRYPTO_KEY_USER pid=90974 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? spid=90974 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=? res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_END pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_LOGOUT pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=ssh res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_LOGIN pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:28 managed-node3 audit[83543]: USER_START pid=83543 uid=0 auid=0 ses=10 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.10.205 terminal=/dev/pts/0 res=success' Jan 25 11:38:28 managed-node3 audit[90999]: CRYPTO_KEY_USER pid=90999 uid=0 auid=0 ses=10 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:4d:6b:3f:59:69:45:9c:32:d9:1b:a1:56:42:a6:40:a3:79:a7:50:bb:cd:51:4f:94:63:4b:b8:3f:66:06:8c:8f direction=? 
spid=90999 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.10.205 terminal=pts/0 res=success' Jan 25 11:38:28 managed-node3 python3[91025]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None PLAY RECAP ********************************************************************* managed-node3 : ok=371 changed=28 unreachable=0 failed=1 skipped=391 rescued=1 ignored=1 TASKS RECAP ******************************************************************** Saturday 25 January 2025 11:38:28 -0500 (0:00:00.419) 0:02:30.327 ****** =============================================================================== fedora.linux_system_roles.podman : Stop and disable service ------------ 11.32s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 fedora.linux_system_roles.podman : Stop and disable service ------------ 11.19s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 fedora.linux_system_roles.podman : For testing and debugging - services --- 2.91s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 fedora.linux_system_roles.podman : For testing and debugging - services --- 2.88s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 fedora.linux_system_roles.podman : For testing and debugging - services --- 2.80s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 fedora.linux_system_roles.podman : For testing and debugging - services --- 2.75s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 fedora.linux_system_roles.podman : Ensure container images are present --- 1.78s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 fedora.linux_system_roles.podman : Remove quadlet file ------------------ 1.77s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 fedora.linux_system_roles.podman : Ensure container images are present --- 1.64s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 fedora.linux_system_roles.podman : Start service ------------------------ 1.44s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 fedora.linux_system_roles.podman : Start service ------------------------ 1.36s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Gathering Facts --------------------------------------------------------- 1.28s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9 fedora.linux_system_roles.podman : Gather the package facts ------------- 1.19s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.podman : Reload systemctl --------------------- 1.04s /tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.03s
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.01s
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.98s
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.96s
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Reload systemctl --------------------- 0.95s
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.88s
/tmp/collections-c17/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70