ansible-playbook [core 2.17.8]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.13/site-packages/ansible
  ansible collection location = /tmp/collections-xcg
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.13.2 (main, Feb 4 2025, 00:00:00) [GCC 14.2.1 20250110 (Red Hat 14.2.1-7)] (/usr/bin/python3.13)
  jinja version = 3.1.5
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_pod.yml ************************************************
2 plays in /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:5
Saturday 15 February 2025 11:43:19 -0500 (0:00:00.011) 0:00:00.011 *****
[WARNING]: Found variable using reserved name: q
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-NQX/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Ensure that the role can manage quadlet pods] ****************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
Saturday 15 February 2025 11:43:19 -0500 (0:00:00.032) 0:00:00.044 *****
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node3] TASK [Run the role - root] ***************************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:34 Saturday 15 February 2025 11:43:21 -0500 (0:00:01.485) 0:00:01.529 ***** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 15 February 2025 11:43:21 -0500 (0:00:00.103) 0:00:01.633 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 15 February 2025 11:43:21 -0500 (0:00:00.038) 0:00:01.671 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 15 February 2025 11:43:21 -0500 (0:00:00.048) 0:00:01.719 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.671) 0:00:02.390 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.070) 0:00:02.461 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.489) 0:00:02.951 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.080) 0:00:03.032 ***** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, 
"item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.090) 0:00:03.122 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 15 February 2025 11:43:24 -0500 (0:00:01.322) 0:00:04.445 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.044) 0:00:04.489 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.047) 0:00:04.537 ***** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.050) 0:00:04.588 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.061) 0:00:04.649 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.073) 0:00:04.723 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.030906", "end": "2025-02-15 11:43:24.788898", "rc": 0, "start": "2025-02-15 11:43:24.757992" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.533) 0:00:05.256 ***** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.034) 0:00:05.291 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.034) 0:00:05.326 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.056) 0:00:05.383 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.052) 0:00:05.436 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.073) 0:00:05.509 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.050) 0:00:05.559 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.061) 0:00:05.621 ***** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super 
User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.515) 0:00:06.137 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.058) 0:00:06.196 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.071) 0:00:06.267 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.446) 0:00:06.713 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.033) 0:00:06.747 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.034) 0:00:06.781 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 
February 2025 11:43:26 -0500 (0:00:00.036) 0:00:06.817 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.031) 0:00:06.848 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.031) 0:00:06.880 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.031) 0:00:06.911 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.030) 0:00:06.942 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.030) 0:00:06.972 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.060) 0:00:07.033 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.061) 0:00:07.095 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.034) 0:00:07.129 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.049) 0:00:07.179 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.103) 0:00:07.283 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.056) 0:00:07.339 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.058) 0:00:07.397 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.110) 0:00:07.507 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.052) 0:00:07.560 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.055) 0:00:07.615 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.114) 
0:00:07.729 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.046) 0:00:07.776 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.056) 0:00:07.832 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.052) 0:00:07.885 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.055) 0:00:07.941 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.053) 0:00:07.994 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.054) 0:00:08.049 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.048) 0:00:08.098 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.051) 0:00:08.150 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle 
secrets] ***********************
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 15 February 2025 11:43:27 -0500 (0:00:00.044) 0:00:08.195 *****
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 15 February 2025 11:43:27 -0500 (0:00:00.044) 0:00:08.239 *****
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Saturday 15 February 2025 11:43:27 -0500 (0:00:00.046) 0:00:08.285 *****
included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 15 February 2025 11:43:28 -0500 (0:00:00.158) 0:00:08.444 *****
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "",
        "__podman_quadlet_spec": {
            "Pod": {
                "PodName": "quadlet-pod"
            }
        },
        "__podman_quadlet_str": "",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 15 February 2025 11:43:28 -0500 (0:00:00.069) 0:00:08.514 *****
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 15 February 2025 11:43:28 -0500 (0:00:00.069) 0:00:08.583 *****
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_quadlet_spec | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 15 February 2025 11:43:28 -0500 (0:00:00.056) 0:00:08.639 *****
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-pod-pod",
        "__podman_quadlet_type": "pod",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 15 February 2025 11:43:28 -0500
(0:00:00.061) 0:00:08.701 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.072) 0:00:08.774 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.045) 0:00:08.819 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.039) 0:00:08.859 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.044) 0:00:08.903 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.408) 0:00:09.311 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.038) 0:00:09.349 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user 
subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.038) 0:00:09.388 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.033) 0:00:09.422 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.041) 0:00:09.463 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.033) 0:00:09.496 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.032) 0:00:09.529 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.037) 0:00:09.567 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.080) 0:00:09.647 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.067) 0:00:09.714 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, 
"changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.034) 0:00:09.749 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.033) 0:00:09.782 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.077) 0:00:09.860 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.039) 0:00:09.900 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.034) 0:00:09.934 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.071) 0:00:10.005 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.058) 0:00:10.064 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.036) 0:00:10.101 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 15 February 2025 11:43:29 -0500 (0:00:00.052) 0:00:10.153 *****
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 15 February 2025 11:43:29 -0500 (0:00:00.048) 0:00:10.202 *****
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 15 February 2025 11:43:29 -0500 (0:00:00.038) 0:00:10.240 *****
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 15 February 2025 11:43:29 -0500 (0:00:00.031) 0:00:10.272 *****
ok: [managed-node3] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/containers/systemd",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 4096,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 15 February 2025 11:43:30 -0500 (0:00:00.536) 0:00:10.808 *****
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_quadlet_file_src | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 15 February 2025 11:43:30 -0500 (0:00:00.033) 0:00:10.842 *****
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_quadlet_str | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 15 February 2025 11:43:30 -0500 (0:00:00.036) 0:00:10.878 *****
changed: [managed-node3] => {
    "changed": true,
    "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d",
    "dest": "/etc/containers/systemd/quadlet-pod-pod.pod",
    "gid": 0,
    "group": "root",
    "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 70,
    "src": "/root/.ansible/tmp/ansible-tmp-1739637810.5389552-19891-36825919936081/.source.pod",
    "state": "file",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 15 February 2025 11:43:31 -0500 (0:00:00.918) 0:00:11.796
***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 15 February 2025 11:43:32 -0500 (0:00:01.029) 0:00:12.826 ***** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target sysinit.target -.mount system.slice basic.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893923840", "EffectiveMemoryMax": "3893923840", "EffectiveTasksMax": "4414", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid 
--pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14714", "LimitNPROCSoft": "14714", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14714", "LimitSIGPENDINGSoft": "14714", "LimitSTACK": "infinity", "LimitSTACKSoft": 
"8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3492642816", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", 
"StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4414", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 15 February 2025 11:43:33 -0500 (0:00:01.268) 0:00:14.094 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:43:33 -0500 (0:00:00.035) 0:00:14.129 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:43:33 -0500 (0:00:00.059) 0:00:14.189 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:43:33 -0500 (0:00:00.065) 0:00:14.255 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:33 -0500 (0:00:00.036) 0:00:14.291 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:43:33 -0500 (0:00:00.057) 0:00:14.348 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.068) 0:00:14.416 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.044) 0:00:14.461 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.038) 0:00:14.500 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.055) 0:00:14.555 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.413) 0:00:14.969 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:34 -0500 
(0:00:00.035) 0:00:15.004 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.035) 0:00:15.040 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.041) 0:00:15.082 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.037) 0:00:15.119 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.035) 0:00:15.155 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.035) 0:00:15.191 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.034) 0:00:15.225 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.034) 0:00:15.260 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.068) 0:00:15.328 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.060) 0:00:15.389 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.050) 0:00:15.439 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.090) 0:00:15.530 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.056) 0:00:15.587 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.034) 0:00:15.622 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.073) 0:00:15.695 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.055) 0:00:15.751 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.031) 0:00:15.782 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.034) 0:00:15.816 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.034) 0:00:15.851 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.051) 0:00:15.902 ***** changed: [managed-node3] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 15 February 2025 11:43:37 -0500 (0:00:01.627) 0:00:17.530 ***** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 4096, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:37 -0500 (0:00:00.463) 0:00:17.994 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 15 February 2025 11:43:37 -0500 (0:00:00.037) 0:00:18.031 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 15 February 2025 11:43:37 -0500 (0:00:00.037) 0:00:18.069 ***** changed: [managed-node3] => { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": 
"/etc/containers/systemd/quadlet-pod-container.container", "gid": 0, "group": "root", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1739637817.729765-20111-98568911346954/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 15 February 2025 11:43:38 -0500 (0:00:00.812) 0:00:18.882 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.860) 0:00:19.742 ***** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "quadlet-pod-pod-pod.service -.mount system.slice sysinit.target basic.target systemd-journald.socket network-online.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893923840", "EffectiveMemoryMax": "3893923840", "EffectiveTasksMax": "4414", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service 
SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", 
"LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14714", "LimitNPROCSoft": "14714", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14714", "LimitSIGPENDINGSoft": "14714", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3480096768", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", 
"StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4414", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target quadlet-pod-pod-pod.service", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.808) 0:00:20.551 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.033) 0:00:20.584 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.028) 0:00:20.612 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.041) 0:00:20.654 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:40 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.080) 0:00:20.734 ***** ok: [managed-node3] => (item=quadlet-pod-container.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:00.003858", "end": "2025-02-15 
11:43:40.675532", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-02-15 11:43:40.671674" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/libpod/testimage:20210610 ContainerName=quadlet-pod-container Pod=quadlet-pod-pod.pod Exec=/bin/busybox-extras httpd -f -p 80 ok: [managed-node3] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:00.003148", "end": "2025-02-15 11:43:41.043885", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-02-15 11:43:41.040737" } STDOUT: # # Ansible managed # # system_role:podman [Pod] PodName=quadlet-pod TASK [Check pod] *************************************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:49 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.770) 0:00:21.505 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.043440", "end": "2025-02-15 11:43:41.483827", "failed_when_result": false, "rc": 0, "start": "2025-02-15 11:43:41.440387" } STDOUT: quadlet-pod-infra quadlet-pod-container TASK [Create user for testing] ************************************************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:57 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.447) 0:00:21.952 ***** changed: [managed-node3] => { "changed": true, "comment": "", "create_home": true, "group": 2223, "home": "/home/user_quadlet_pod", "name": "user_quadlet_pod", "shell": "/bin/bash", "state": "present", "system": false, "uid": 2223 } TASK [Run the role - user] ***************************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:62 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.648) 0:00:22.600 ***** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.110) 0:00:22.711 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.053) 0:00:22.764 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.039) 0:00:22.804 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.035) 0:00:22.839 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.032) 0:00:22.872 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.032) 0:00:22.905 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.031) 0:00:22.936 ***** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.069) 0:00:23.006 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 15 February 2025 11:43:43 -0500 (0:00:00.963) 0:00:23.969 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 15 February 2025 11:43:43 -0500 (0:00:00.033) 0:00:24.003 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 15 February 2025 11:43:43 -0500 (0:00:00.036) 0:00:24.039 ***** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 15 February 2025 11:43:43 -0500 (0:00:00.034) 0:00:24.074 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 15 February 2025 11:43:43 -0500 (0:00:00.035) 0:00:24.109 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 15 February 2025 11:43:43 -0500 (0:00:00.032) 0:00:24.142 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028463", "end": "2025-02-15 11:43:44.110274", "rc": 0, "start": "2025-02-15 11:43:44.081811" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.432) 0:00:24.574 ***** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.036) 0:00:24.610 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.030) 0:00:24.641 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 15 February 2025 11:43:44 -0500 
(0:00:00.035) 0:00:24.676 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.049) 0:00:24.726 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.051) 0:00:24.777 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.052) 0:00:24.830 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.065) 0:00:24.895 ***** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "user_quadlet_pod": [ "x", "2223", "2223", "", "/home/user_quadlet_pod", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.411) 0:00:25.307 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.036) 0:00:25.344 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:45 -0500 (0:00:00.045) 0:00:25.389 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", 
"ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:45 -0500 (0:00:00.409) 0:00:25.799 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003991", "end": "2025-02-15 11:43:45.748287", "rc": 0, "start": "2025-02-15 11:43:45.744296" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:45 -0500 (0:00:00.415) 0:00:26.214 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005960", "end": "2025-02-15 11:43:46.164435", "rc": 0, "start": "2025-02-15 11:43:46.158475" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.412) 0:00:26.627 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.052) 0:00:26.680 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.035) 0:00:26.715 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.041) 0:00:26.756 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail 
if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.033) 0:00:26.790 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.039) 0:00:26.829 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.034) 0:00:26.863 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/root/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/root/.config/containers/policy.json", "__podman_registries_conf_file": "/root/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/root/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.041) 0:00:26.905 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.058) 0:00:26.964 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.033) 0:00:26.998 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.036) 0:00:27.034 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.059) 0:00:27.094 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | 
length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.032) 0:00:27.126 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.033) 0:00:27.159 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.065) 0:00:27.224 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.033) 0:00:27.257 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.032) 0:00:27.290 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.063) 0:00:27.354 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.076) 0:00:27.430 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.034) 0:00:27.464 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.032) 0:00:27.497 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.032) 0:00:27.530 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.032) 0:00:27.563 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.032) 0:00:27.596 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.038) 0:00:27.634 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.028) 0:00:27.663 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.028) 0:00:27.691 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.029) 0:00:27.720 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.036) 0:00:27.757 ***** included: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.093) 0:00:27.850 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.043) 0:00:27.894 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.042) 0:00:27.936 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.033) 0:00:27.970 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.049) 0:00:28.019 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.066) 0:00:28.085 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.038) 0:00:28.123 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.037) 0:00:28.161 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.045) 0:00:28.206 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:48 -0500 (0:00:00.406) 0:00:28.613 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004231", "end": "2025-02-15 11:43:48.554124", "rc": 0, "start": "2025-02-15 11:43:48.549893" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:48 -0500 (0:00:00.409) 0:00:29.022 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006850", "end": "2025-02-15 11:43:48.973726", "rc": 0, "start": "2025-02-15 11:43:48.966876" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.414) 0:00:29.437 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.051) 0:00:29.488 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } 
TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.037) 0:00:29.526 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.038) 0:00:29.564 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.038) 0:00:29.602 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.044) 0:00:29.647 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.039) 0:00:29.686 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.060) 0:00:29.747 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.047) 0:00:29.794 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.036) 0:00:29.831 ***** ok: [managed-node3] => { "ansible_facts": { 
"__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.095) 0:00:29.927 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.075) 0:00:30.002 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.056) 0:00:30.059 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.080) 0:00:30.140 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.067) 0:00:30.207 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": "0:00:00.019577", "end": "2025-02-15 11:43:50.172501", "rc": 0, "start": "2025-02-15 11:43:50.152924" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.466) 0:00:30.674 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.039) 0:00:30.714 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.034) 0:00:30.749 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } 
TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.028) 0:00:30.777 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.043) 0:00:30.821 ***** changed: [managed-node3] => { "changed": true, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 4096, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.427) 0:00:31.248 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.048) 0:00:31.296 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.054) 0:00:31.350 ***** changed: [managed-node3] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1739637831.025227-20452-141306933816545/.source.pod", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 15 February 2025 11:43:51 -0500 (0:00:00.822) 0:00:32.172 ***** [WARNING]: Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. 
To avoid this, create the remote_tmp dir with the correct permissions manually ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.759) 0:00:32.932 ***** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target podman-user-wait-network-online.service app.slice run-user-2223.mount -.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/user/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893923840", "EffectiveMemoryMax": "3893923840", "EffectiveTasksMax": "4414", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid 
--pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14714", "LimitNPROCSoft": "14714", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14714", 
"LimitSIGPENDINGSoft": "14714", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3886530560", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/user/2223/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": 
"infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4414", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 15 February 2025 11:43:54 -0500 (0:00:01.535) 0:00:34.468 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.042) 0:00:34.511 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.048) 0:00:34.560 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.048) 0:00:34.608 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.052) 0:00:34.661 
***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.067) 0:00:34.728 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.075) 0:00:34.803 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.044) 0:00:34.847 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.049) 0:00:34.896 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.056) 0:00:34.953 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.425) 0:00:35.379 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004923", "end": "2025-02-15 11:43:55.333626", "rc": 0, "start": "2025-02-15 11:43:55.328703" } STDOUT: 0: 
user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.420) 0:00:35.799 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006628", "end": "2025-02-15 11:43:55.784389", "rc": 0, "start": "2025-02-15 11:43:55.777761" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.452) 0:00:36.251 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.109) 0:00:36.361 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.061) 0:00:36.422 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.074) 0:00:36.497 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.054) 0:00:36.552 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.050) 0:00:36.602 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.058) 0:00:36.661 ***** ok: [managed-node3] => { 
"ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.101) 0:00:36.762 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.046) 0:00:36.808 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.048) 0:00:36.857 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.136) 0:00:36.993 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.051) 0:00:37.045 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.039) 0:00:37.084 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.092) 0:00:37.177 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.057) 0:00:37.234 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": null, "end": null, "rc": 0, "start": null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_pod exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_pod' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:43:57 -0500 (0:00:00.429) 0:00:37.663 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:43:57 -0500 (0:00:00.049) 0:00:37.713 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 15 February 2025 11:43:57 -0500 (0:00:00.042) 0:00:37.755 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 15 February 2025 11:43:57 -0500 (0:00:00.038) 0:00:37.794 ***** changed: [managed-node3] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 15 February 2025 11:43:58 -0500 (0:00:01.569) 0:00:39.363 ***** ok: [managed-node3] => { "changed": false, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 4096, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:59 -0500 (0:00:00.436) 0:00:39.800 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 15 February 2025 11:43:59 -0500 (0:00:00.037) 0:00:39.837 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 15 February 2025 11:43:59 -0500 (0:00:00.044) 0:00:39.882 ***** changed: [managed-node3] => { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1739637839.5501642-20826-239127929999366/.source.container", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 15 February 2025 11:44:00 -0500 (0:00:00.840) 0:00:40.722 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 15 February 2025 11:44:01 -0500 (0:00:00.718) 0:00:41.441 ***** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "podman-user-wait-network-online.service basic.target run-user-2223.mount quadlet-pod-pod-pod.service -.mount app.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "default.target shutdown.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", 
"ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/user/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893923840", "EffectiveMemoryMax": "3893923840", "EffectiveTasksMax": "4414", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": 
"quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14714", "LimitNPROCSoft": "14714", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14714", "LimitSIGPENDINGSoft": "14714", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3857342464", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", 
"RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4414", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target quadlet-pod-pod-pod.service", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 15 February 2025 11:44:01 -0500 (0:00:00.858) 0:00:42.299 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 15 February 2025 11:44:01 -0500 (0:00:00.088) 0:00:42.388 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:44:02 -0500 (0:00:00.067) 0:00:42.456 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK 
[fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 February 2025 11:44:02 -0500 (0:00:00.085) 0:00:42.541 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:70 Saturday 15 February 2025 11:44:02 -0500 (0:00:00.117) 0:00:42.659 ***** ok: [managed-node3] => (item=quadlet-pod-container.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:00.003417", "end": "2025-02-15 11:44:02.682173", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-02-15 11:44:02.678756" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/libpod/testimage:20210610 ContainerName=quadlet-pod-container Pod=quadlet-pod-pod.pod Exec=/bin/busybox-extras httpd -f -p 80 ok: [managed-node3] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:00.003568", "end": "2025-02-15 11:44:03.086956", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-02-15 11:44:03.083388" } STDOUT: # # Ansible managed # # system_role:podman [Pod] PodName=quadlet-pod TASK [Check pod] *************************************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:79 Saturday 15 February 2025 11:44:03 -0500 (0:00:00.888) 0:00:43.548 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.064285", "end": "2025-02-15 11:44:03.637684", "failed_when_result": false, "rc": 0, "start": "2025-02-15 11:44:03.573399" } STDOUT: quadlet-pod-infra quadlet-pod-container TASK [Ensure linger] *********************************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:91 Saturday 15 February 2025 11:44:03 -0500 (0:00:00.620) 0:00:44.168 ***** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1739637830.1614892, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1739637830.1614892, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 148, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1739637830.1614892, "nlink": 1, "path": "/var/lib/systemd/linger/user_quadlet_pod", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "2176803396", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Cleanup user] 
************************************************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:99 Saturday 15 February 2025 11:44:04 -0500 (0:00:00.550) 0:00:44.719 ***** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 15 February 2025 11:44:04 -0500 (0:00:00.379) 0:00:45.098 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 15 February 2025 11:44:04 -0500 (0:00:00.124) 0:00:45.223 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 15 February 2025 11:44:04 -0500 (0:00:00.083) 0:00:45.306 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 15 February 2025 11:44:04 -0500 (0:00:00.054) 0:00:45.360 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 15 February 2025 11:44:05 -0500 (0:00:00.055) 0:00:45.415 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 15 February 2025 11:44:05 -0500 (0:00:00.055) 0:00:45.471 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 15 February 2025 11:44:05 -0500 (0:00:00.055) 0:00:45.527 ***** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": { 
"__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 15 February 2025 11:44:05 -0500 (0:00:00.109) 0:00:45.637 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 15 February 2025 11:44:06 -0500 (0:00:01.033) 0:00:46.670 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 15 February 2025 11:44:06 -0500 (0:00:00.033) 0:00:46.704 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 15 February 2025 11:44:06 -0500 (0:00:00.039) 0:00:46.743 ***** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 15 February 2025 11:44:06 -0500 (0:00:00.056) 0:00:46.800 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 15 February 2025 11:44:06 -0500 (0:00:00.054) 0:00:46.855 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 15 February 2025 11:44:06 -0500 (0:00:00.052) 0:00:46.907 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028779", 
"end": "2025-02-15 11:44:06.894167", "rc": 0, "start": "2025-02-15 11:44:06.865388" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 15 February 2025 11:44:06 -0500 (0:00:00.449) 0:00:47.356 ***** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.037) 0:00:47.394 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.047) 0:00:47.441 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.070) 0:00:47.512 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.077) 0:00:47.589 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.112) 0:00:47.701 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.166) 0:00:47.867 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.193) 0:00:48.061 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.064) 0:00:48.126 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.087) 0:00:48.214 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.074) 0:00:48.289 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:44:08 -0500 (0:00:00.492) 0:00:48.781 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004108", "end": "2025-02-15 11:44:08.778020", "rc": 0, "start": "2025-02-15 11:44:08.773912" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:44:08 -0500 (0:00:00.489) 0:00:49.270 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006559", "end": "2025-02-15 11:44:09.251026", "rc": 0, "start": "2025-02-15 11:44:09.244467" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:44:09 -0500 (0:00:00.491) 0:00:49.761 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:44:09 -0500 (0:00:00.110) 0:00:49.871 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:44:09 -0500 (0:00:00.064) 0:00:49.936 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:09 -0500 (0:00:00.075) 0:00:50.011 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:44:09 -0500 (0:00:00.057) 0:00:50.069 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:44:09 -0500 (0:00:00.068) 0:00:50.138 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 15 February 2025 11:44:09 -0500 (0:00:00.054) 0:00:50.192 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 15 February 2025 11:44:09 
-0500 (0:00:00.083) 0:00:50.276 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.115) 0:00:50.391 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.066) 0:00:50.458 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.066) 0:00:50.525 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.133) 0:00:50.658 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.061) 0:00:50.720 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.054) 0:00:50.774 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.091) 0:00:50.865 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.036) 0:00:50.902 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.036) 0:00:50.939 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.073) 0:00:51.012 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.046) 0:00:51.059 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.056) 0:00:51.116 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.058) 0:00:51.176 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.059) 0:00:51.236 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.065) 0:00:51.301 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.064) 0:00:51.366 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.059) 0:00:51.425 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.053) 0:00:51.479 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.053) 0:00:51.532 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.051) 0:00:51.584 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.050) 0:00:51.634 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.170) 0:00:51.804 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.078) 0:00:51.882 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.071) 0:00:51.954 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.058) 0:00:52.013 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.092) 0:00:52.105 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.111) 0:00:52.217 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.078) 0:00:52.295 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.069) 0:00:52.365 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:44:12 -0500 (0:00:00.077) 0:00:52.442 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, 
"version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:44:12 -0500 (0:00:00.531) 0:00:52.974 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004030", "end": "2025-02-15 11:44:12.961058", "rc": 0, "start": "2025-02-15 11:44:12.957028" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:44:13 -0500 (0:00:00.474) 0:00:53.449 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006531", "end": "2025-02-15 11:44:13.420093", "rc": 0, "start": "2025-02-15 11:44:13.413562" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:44:13 -0500 (0:00:00.492) 0:00:53.941 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:44:13 -0500 (0:00:00.118) 0:00:54.060 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:44:13 -0500 (0:00:00.067) 0:00:54.127 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:13 -0500 (0:00:00.074) 0:00:54.202 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:44:13 -0500 (0:00:00.062) 0:00:54.265 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:44:13 -0500 (0:00:00.079) 0:00:54.344 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:44:14 -0500 (0:00:00.069) 0:00:54.413 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:44:14 -0500 (0:00:00.127) 0:00:54.541 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:44:14 -0500 (0:00:00.054) 0:00:54.595 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:44:14 -0500 (0:00:00.069) 0:00:54.665 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:44:14 -0500 (0:00:00.215) 0:00:54.880 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:44:14 -0500 (0:00:00.098) 0:00:54.979 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:44:14 -0500 (0:00:00.160) 0:00:55.140 ***** ok: [managed-node3] => { 
"changed": false, "stat": { "atime": 1739637830.192489, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1739637841.730396, "dev": 86, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1739637841.730396, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 220, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:44:15 -0500 (0:00:00.532) 0:00:55.672 ***** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Sat 2025-02-15 11:44:01 EST", "ActiveEnterTimestampMonotonic": "1031880817", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "podman-user-wait-network-online.service basic.target run-user-2223.mount quadlet-pod-pod-pod.service -.mount app.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-02-15 11:44:01 EST", "AssertTimestampMonotonic": "1031745481", "Before": "default.target shutdown.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "96080000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-02-15 11:44:01 EST", "ConditionTimestampMonotonic": "1031745475", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/user.slice/user-2223.slice/user@2223.service/app.slice/quadlet-pod-container.service", "ControlGroupId": "14328", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", 
"Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/user/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893923840", "EffectiveMemoryMax": "3893923840", "EffectiveTasksMax": "4414", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "71852", "ExecMainStartTimestamp": "Sat 2025-02-15 11:44:01 EST", "ExecMainStartTimestampMonotonic": "1031880749", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Sat 2025-02-15 11:44:01 EST] ; stop_time=[n/a] ; pid=71842 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Sat 2025-02-15 11:44:01 EST] ; stop_time=[n/a] ; pid=71842 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-02-15 11:44:01 EST", "InactiveExitTimestampMonotonic": "1031752428", "InvocationID": 
"3e3d3b33387b433eaa08ff9e2fe4cd5a", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14714", "LimitNPROCSoft": "14714", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14714", "LimitSIGPENDINGSoft": "14714", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "71852", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3856400384", "MemoryCurrent": "880640", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "18059264", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-02-15 11:44:01 EST", "StateChangeTimestampMonotonic": "1031880817", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "4414", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target quadlet-pod-pod-pod.service", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:44:26 -0500 (0:00:11.176) 0:01:06.848 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637840.9064028, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1739637840.238408, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 279491, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1739637839.9254107, "nlink": 1, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 2223, "version": "2940983721", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK 
[fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:44:26 -0500 (0:00:00.479) 0:01:07.328 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 15 February 2025 11:44:27 -0500 (0:00:00.109) 0:01:07.437 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 15 February 2025 11:44:27 -0500 (0:00:00.640) 0:01:08.078 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 15 February 2025 11:44:27 -0500 (0:00:00.084) 0:01:08.163 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 15 February 2025 11:44:27 -0500 (0:00:00.051) 0:01:08.215 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:44:27 -0500 (0:00:00.038) 0:01:08.253 ***** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:28 -0500 (0:00:00.441) 0:01:08.695 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:44:29 -0500 (0:00:00.752) 0:01:09.448 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:44:29 -0500 
(0:00:00.585) 0:01:10.034 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:29 -0500 (0:00:00.047) 0:01:10.081 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:44:29 -0500 (0:00:00.040) 0:01:10.122 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.074304", "end": "2025-02-15 11:44:30.234518", "rc": 0, "start": "2025-02-15 11:44:30.160214" } STDOUT: 3b19043cb42404e2229a0bd8cf416f4406b27dc6f1ff3dcd840a8ac1e0daf786 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:44:30 -0500 (0:00:00.601) 0:01:10.723 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:44:30 -0500 (0:00:00.099) 0:01:10.823 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:44:30 -0500 (0:00:00.058) 0:01:10.881 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:44:30 -0500 (0:00:00.057) 0:01:10.939 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:44:30 -0500 (0:00:00.072) 0:01:11.012 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.044945", "end": "2025-02-15 11:44:31.121551", "rc": 0, "start": "2025-02-15 11:44:31.076606" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:44:31 -0500 (0:00:00.615) 0:01:11.627 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.047028", "end": "2025-02-15 11:44:31.754253", "rc": 0, "start": "2025-02-15 11:44:31.707225" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:44:31 -0500 (0:00:00.619) 0:01:12.246 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.044817", "end": "2025-02-15 11:44:32.329167", "rc": 0, "start": "2025-02-15 11:44:32.284350" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:44:32 -0500 (0:00:00.573) 0:01:12.819 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.045244", "end": "2025-02-15 11:44:32.928026", "rc": 0, "start": "2025-02-15 11:44:32.882782" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.591) 0:01:13.411 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.557) 0:01:13.969 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:44:34 -0500 (0:00:00.571) 0:01:14.540 ***** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", 
"source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf5-makecache.service": { "name": "dnf5-makecache.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction-cleanup.service": { "name": "dnf5-offline-transaction-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction.service": { "name": "dnf5-offline-transaction.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": 
"systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "running", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": 
"sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-bsod.service": { "name": "systemd-bsod.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed-firstboot.service": { "name": "systemd-homed-firstboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-mountfsd.service": { "name": "systemd-mountfsd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-persistent-storage.service": { "name": "systemd-networkd-persistent-storage.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-nsresourced.service": { "name": "systemd-nsresourced.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": 
"systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-storagetm.service": { "name": "systemd-storagetm.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@2223.service": { "name": "user-runtime-dir@2223.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", 
"status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@2223.service": { "name": "user@2223.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:36 -0500 (0:00:02.828) 0:01:17.368 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.035) 0:01:17.403 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.055) 0:01:17.459 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.071) 0:01:17.530 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.060) 0:01:17.591 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.064) 0:01:17.656 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.084) 0:01:17.740 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does 
not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.045) 0:01:17.786 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.047) 0:01:17.834 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.052) 0:01:17.886 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.421) 0:01:18.308 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004140", "end": "2025-02-15 11:44:38.255143", "rc": 0, "start": "2025-02-15 11:44:38.251003" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.434) 0:01:18.742 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006994", "end": "2025-02-15 11:44:38.695947", "rc": 0, "start": "2025-02-15 11:44:38.688953" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.428) 0:01:19.170 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** 
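Note on the two getsubids checks above: they confirm the rootless user owns a subordinate UID and a subordinate GID range (start 720896, length 65536), which rootless podman needs for user-namespace mapping. The same information can be read by hand, as sketched below with the values reported in this run.

    # Values as reported by the tasks above.
    getsubids user_quadlet_pod        # -> 0: user_quadlet_pod 720896 65536
    getsubids -g user_quadlet_pod     # -> 0: user_quadlet_pod 720896 65536
    # Fallback used by the role when getsubids is not installed: the flat files,
    # where each entry has the form user:start:count.
    grep '^user_quadlet_pod:' /etc/subuid /etc/subgid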
task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.084) 0:01:19.254 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.042) 0:01:19.296 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.042) 0:01:19.339 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.043) 0:01:19.382 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.045) 0:01:19.428 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.037) 0:01:19.465 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.060) 0:01:19.525 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.038) 0:01:19.564 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.038) 0:01:19.602 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.167) 0:01:19.770 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.079) 0:01:19.850 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.098) 0:01:19.948 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637830.192489, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1739637866.388198, "dev": 86, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1739637866.388198, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.415) 0:01:20.364 ***** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-pod-pod.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "app.slice -.mount podman-user-wait-network-online.service basic.target run-user-2223.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", 
"CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/user/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893923840", "EffectiveMemoryMax": "3893923840", "EffectiveTasksMax": "4414", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; 
start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14714", "LimitNPROCSoft": "14714", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14714", "LimitSIGPENDINGSoft": "14714", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3884175360", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": 
"0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/user/2223/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4414", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : 
See if quadlet file exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.742) 0:01:21.106 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637832.3744714, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "ctime": 1739637831.7064767, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 279488, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1739637831.417479, "nlink": 1, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 70, "uid": 2223, "version": "1341012151", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.420) 0:01:21.527 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.068) 0:01:21.595 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.393) 0:01:21.989 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.055) 0:01:22.045 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.041) 0:01:22.087 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.038) 0:01:22.125 
***** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.409) 0:01:22.534 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.716) 0:01:23.251 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.548) 0:01:23.800 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.049) 0:01:23.849 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.038) 0:01:23.887 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.053921", "end": "2025-02-15 11:44:43.976786", "rc": 0, "start": "2025-02-15 11:44:43.922865" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.580) 0:01:24.468 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.120) 0:01:24.588 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.069) 0:01:24.657 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.062) 0:01:24.720 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.072) 0:01:24.793 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.044391", "end": "2025-02-15 11:44:44.867244", "rc": 0, "start": "2025-02-15 11:44:44.822853" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.566) 0:01:25.359 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.048570", "end": "2025-02-15 11:44:45.472060", "rc": 0, "start": "2025-02-15 11:44:45.423490" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:44:45 -0500 (0:00:00.618) 0:01:25.978 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.049580", "end": "2025-02-15 11:44:46.069581", "rc": 0, "start": "2025-02-15 11:44:46.020001" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:44:46 -0500 (0:00:00.580) 0:01:26.558 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.045339", "end": "2025-02-15 11:44:46.636103", "rc": 0, "start": "2025-02-15 11:44:46.590764" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:44:46 -0500 (0:00:00.581) 0:01:27.140 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:44:47 -0500 (0:00:00.553) 0:01:27.693 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:44:47 -0500 (0:00:00.545) 0:01:28.239 ***** ok: [managed-node3] => { "ansible_facts": { "services": { 
"NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, 
"dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf5-makecache.service": { "name": "dnf5-makecache.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction-cleanup.service": { "name": "dnf5-offline-transaction-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction.service": { "name": "dnf5-offline-transaction.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "running", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" 
}, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-bsod.service": { "name": "systemd-bsod.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed-firstboot.service": { "name": "systemd-homed-firstboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-mountfsd.service": { "name": "systemd-mountfsd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-persistent-storage.service": { "name": "systemd-networkd-persistent-storage.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-nsresourced.service": { "name": "systemd-nsresourced.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-storagetm.service": { "name": "systemd-storagetm.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@2223.service": { "name": "user-runtime-dir@2223.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@2223.service": { "name": "user@2223.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:50 -0500 (0:00:02.631) 0:01:30.870 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.045) 0:01:30.915 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node3 => (item=user_quadlet_pod) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.100) 0:01:31.016 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.042) 0:01:31.058 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.043) 0:01:31.102 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637830.192489, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1739637866.388198, "dev": 86, "device_type": 0, "executable": true, "exists": true, "gid": 2223, 
"gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1739637866.388198, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.415) 0:01:31.517 ***** ok: [managed-node3] => { "changed": false, "containers": [] } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.637) 0:01:32.154 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-q" ], "delta": "0:00:00.049430", "end": "2025-02-15 11:44:52.235968", "rc": 0, "start": "2025-02-15 11:44:52.186538" } STDOUT: podman TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Saturday 15 February 2025 11:44:52 -0500 (0:00:00.553) 0:01:32.708 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "secret", "ls", "-n", "-q" ], "delta": "0:00:00.058889", "end": "2025-02-15 11:44:52.787052", "rc": 0, "start": "2025-02-15 11:44:52.728163" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Saturday 15 February 2025 11:44:52 -0500 (0:00:00.548) 0:01:33.257 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "loginctl", "disable-linger", "user_quadlet_pod" ], "delta": "0:00:00.007471", "end": "2025-02-15 11:44:53.211124", "rc": 0, "start": "2025-02-15 11:44:53.203653" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Saturday 15 February 2025 11:44:53 -0500 (0:00:00.447) 0:01:33.704 ***** fatal: [managed-node3]: FAILED! 
=> { "attempts": 1, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_pod" ], "delta": "0:00:00.006565", "end": "2025-02-15 11:44:53.647653", "rc": 1, "start": "2025-02-15 11:44:53.641088" } STDERR: Failed to get user: User ID 2223 is not logged in or lingering MSG: non-zero return code ...ignoring TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Saturday 15 February 2025 11:44:53 -0500 (0:00:00.409) 0:01:34.114 ***** changed: [managed-node3] => { "changed": true, "name": "systemd-logind", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-02-15 11:43:07 EST", "ActiveEnterTimestampMonotonic": "977257528", "ActiveExitTimestamp": "Sat 2025-02-15 11:43:06 EST", "ActiveExitTimestampMonotonic": "976699087", "ActiveState": "active", "After": "systemd-tmpfiles-setup.service systemd-remount-fs.service nss-user-lookup.target systemd-journald.socket dbus.socket user.slice modprobe@drm.service basic.target -.mount system.slice tmp.mount sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-02-15 11:43:07 EST", "AssertTimestampMonotonic": "977205073", "Before": "session-8.scope multi-user.target user@0.service shutdown.target user-runtime-dir@0.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.freedesktop.login1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "183847000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanClean": "runtime state fdstore", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_linux_immutable cap_sys_admin cap_sys_tty_config cap_audit_control cap_mac_admin", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-02-15 11:43:07 EST", "ConditionTimestampMonotonic": "977205069", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/systemd-logind.service", "ControlGroupId": "10793", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "User Login Management", "DeviceAllow": "block-* r", "DevicePolicy": "auto", "Documentation": "\"man:sd-login(3)\" \"man:systemd-logind.service(8)\" \"man:logind.conf(5)\" \"man:org.freedesktop.login1(5)\"", "DropInPaths": "/usr/lib/systemd/system/systemd-logind.service.d/10-grub2-logind-service.conf /usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3893923840", "EffectiveMemoryMax": "3893923840", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "4414", "Environment": "SYSTEMD_REBOOT_TO_BOOT_LOADER_MENU=true SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-02-15 11:43:07 EST", 
"ExecMainHandoffTimestampMonotonic": "977244591", "ExecMainPID": "62216", "ExecMainStartTimestamp": "Sat 2025-02-15 11:43:07 EST", "ExecMainStartTimestampMonotonic": "977208661", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "768", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/systemd-logind.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPAddressDeny": "::/0 0.0.0.0/0", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-logind.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2025-02-15 11:43:06 EST", "InactiveEnterTimestampMonotonic": "976718066", "InactiveExitTimestamp": "Sat 2025-02-15 11:43:07 EST", "InactiveExitTimestampMonotonic": "977209259", "InvocationID": "0efb21717957461692766da3bc50245d", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "524288", "LimitNPROC": "14714", "LimitNPROCSoft": "14714", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14714", "LimitSIGPENDINGSoft": "14714", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "62216", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3436851200", "MemoryCurrent": "5976064", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "6504448", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", 
"MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "2", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "systemd-logind.service dbus-org.freedesktop.login1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "yes", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "yes", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "strict", "ReadWritePaths": "/etc /run", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/var/lib/systemd/linger /run/systemd/shutdown /run/systemd/inhibit /run/systemd/sessions /run/systemd/users /run/systemd/seats", "Restart": "always", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "0", "RestartUSecNext": "0", "RestrictAddressFamilies": "AF_NETLINK AF_UNIX", "RestrictNamespaces": "yes", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectory": "systemd/inhibit systemd/seats systemd/sessions systemd/shutdown systemd/users", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "yes", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-02-15 11:43:07 EST", "StateChangeTimestampMonotonic": "977257528", "StateDirectory": "systemd/linger", "StateDirectoryMode": "0755", "StatusErrno": "0", "StatusText": "Processing requests...", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "1", 
"SystemCallFilter": "_llseek _newselect accept accept4 access add_key alarm arch_prctl arm_fadvise64_64 bind brk cacheflush capget capset chdir chmod chown chown32 clock_getres clock_getres_time64 clock_gettime clock_gettime64 clock_nanosleep clock_nanosleep_time64 clone clone3 close close_range connect copy_file_range creat dup dup2 dup3 epoll_create epoll_create1 epoll_ctl epoll_ctl_old epoll_pwait epoll_pwait2 epoll_wait epoll_wait_old eventfd eventfd2 execve execveat exit exit_group faccessat faccessat2 fadvise64 fadvise64_64 fallocate fchdir fchmod fchmodat fchmodat2 fchown fchown32 fchownat fcntl fcntl64 fdatasync fgetxattr flistxattr flock fork fremovexattr fsetxattr fstat fstat64 fstatat64 fstatfs fstatfs64 fsync ftruncate ftruncate64 futex futex_time64 futex_waitv futimesat get_mempolicy get_robust_list get_thread_area getcpu getcwd getdents getdents64 getegid getegid32 geteuid geteuid32 getgid getgid32 getgroups getgroups32 getitimer getpeername getpgid getpgrp getpid getppid getpriority getrandom getresgid getresgid32 getresuid getresuid32 getrlimit getrusage getsid getsockname getsockopt gettid gettimeofday getuid getuid32 getxattr inotify_add_watch inotify_init inotify_init1 inotify_rm_watch io_cancel io_destroy io_getevents io_pgetevents io_pgetevents_time64 io_setup io_submit io_uring_enter io_uring_register io_uring_setup ioctl ioprio_get ioprio_set ipc kcmp keyctl kill landlock_add_rule landlock_create_ruleset landlock_restrict_self lchown lchown32 lgetxattr link linkat listen listxattr llistxattr lremovexattr lseek lsetxattr lstat lstat64 madvise mbind membarrier memfd_create migrate_pages mkdir mkdirat mknod mknodat mlock mlock2 mlockall mmap mmap2 move_pages mprotect mq_getsetattr mq_notify mq_open mq_timedreceive mq_timedreceive_time64 mq_timedsend mq_timedsend_time64 mq_unlink mremap msgctl msgget msgrcv msgsnd msync munlock munlockall munmap name_to_handle_at nanosleep newfstatat nice oldfstat oldlstat oldolduname oldstat olduname open openat openat2 pause personality pidfd_open pidfd_send_signal pipe pipe2 poll ppoll ppoll_time64 prctl pread64 preadv preadv2 prlimit64 process_madvise process_vm_readv process_vm_writev pselect6 pselect6_time64 pwrite64 pwritev pwritev2 read readahead readdir readlink readlinkat readv recv recvfrom recvmmsg recvmmsg_time64 recvmsg remap_file_pages removexattr rename renameat renameat2 request_key restart_syscall riscv_flush_icache rmdir rseq rt_sigaction rt_sigpending rt_sigprocmask rt_sigqueueinfo rt_sigreturn rt_sigsuspend rt_sigtimedwait rt_sigtimedwait_time64 rt_tgsigqueueinfo sched_get_priority_max sched_get_priority_min sched_getaffinity sched_getattr sched_getparam sched_getscheduler sched_rr_get_interval sched_rr_get_interval_time64 sched_setaffinity sched_setattr sched_setparam sched_setscheduler sched_yield seccomp select semctl semget semop semtimedop semtimedop_time64 send sendfile sendfile64 sendmmsg sendmsg sendto set_mempolicy set_mempolicy_home_node set_robust_list set_thread_area set_tid_address set_tls setfsgid setfsgid32 setfsuid setfsuid32 setgid setgid32 setgroups setgroups32 setitimer setns setpgid setpriority setregid setregid32 setresgid setresgid32 setresuid setresuid32 setreuid setreuid32 setrlimit setsid setsockopt setuid setuid32 setxattr shmat shmctl shmdt shmget shutdown sigaction sigaltstack signal signalfd signalfd4 sigpending sigprocmask sigreturn sigsuspend socket socketcall socketpair splice stat stat64 statfs statfs64 statx swapcontext symlink symlinkat sync sync_file_range sync_file_range2 syncfs 
sysinfo tee tgkill time timer_create timer_delete timer_getoverrun timer_gettime timer_gettime64 timer_settime timer_settime64 timerfd_create timerfd_gettime timerfd_gettime64 timerfd_settime timerfd_settime64 times tkill truncate truncate64 ugetrlimit umask uname unlink unlinkat unshare userfaultfd utime utimensat utimensat_time64 utimes vfork vmsplice wait4 waitid waitpid write writev", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "4414", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify-reload", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "static", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "-.mount tmp.mount user.slice dbus.socket modprobe@drm.service", "WantsMountsFor": "/var/tmp /tmp", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Saturday 15 February 2025 11:44:54 -0500 (0:00:00.603) 0:01:34.717 ***** ok: [managed-node3] => { "attempts": 1, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_pod" ], "delta": "0:00:00.077617", "end": "2025-02-15 11:44:54.734083", "failed_when_result": false, "rc": 1, "start": "2025-02-15 11:44:54.656466" } STDERR: Failed to get user: User ID 2223 is not logged in or lingering MSG: non-zero return code TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Saturday 15 February 2025 11:44:54 -0500 (0:00:00.573) 0:01:35.291 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__user_state is failed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:44:54 -0500 (0:00:00.061) 0:01:35.352 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.054) 0:01:35.407 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:109 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.090) 0:01:35.498 ***** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Ensure no linger] ******************************************************** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:120 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.101) 0:01:35.599 ***** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [Cleanup user] ************************************************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:159 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.424) 0:01:36.023 ***** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.109) 0:01:36.133 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.061) 0:01:36.194 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.045) 0:01:36.240 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.035) 0:01:36.276 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.039) 0:01:36.315 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.049) 0:01:36.364 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.063) 0:01:36.428 ***** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", 
"shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.122) 0:01:36.550 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 15 February 2025 11:44:57 -0500 (0:00:01.008) 0:01:37.558 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.038) 0:01:37.597 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.040) 0:01:37.637 ***** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.035) 0:01:37.673 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.038) 0:01:37.712 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] 
******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.035) 0:01:37.747 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.029608", "end": "2025-02-15 11:44:57.711200", "rc": 0, "start": "2025-02-15 11:44:57.681592" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.430) 0:01:38.178 ***** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.039) 0:01:38.218 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.038) 0:01:38.256 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.042) 0:01:38.299 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.047) 0:01:38.346 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 15 February 2025 11:44:58 -0500 (0:00:00.062) 0:01:38.409 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 15 February 2025 11:44:58 -0500 (0:00:00.062) 0:01:38.471 
***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:44:58 -0500 (0:00:00.073) 0:01:38.545 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:44:58 -0500 (0:00:00.044) 0:01:38.589 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:44:58 -0500 (0:00:00.041) 0:01:38.631 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:44:58 -0500 (0:00:00.048) 0:01:38.679 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:44:58 -0500 (0:00:00.410) 0:01:39.090 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004289", "end": "2025-02-15 11:44:59.040171", "rc": 0, "start": "2025-02-15 11:44:59.035882" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:44:59 -0500 (0:00:00.416) 0:01:39.506 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006238", "end": "2025-02-15 
11:44:59.449801", "rc": 0, "start": "2025-02-15 11:44:59.443563" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:44:59 -0500 (0:00:00.408) 0:01:39.914 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:44:59 -0500 (0:00:00.064) 0:01:39.979 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:44:59 -0500 (0:00:00.091) 0:01:40.070 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:59 -0500 (0:00:00.039) 0:01:40.110 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:44:59 -0500 (0:00:00.044) 0:01:40.155 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:44:59 -0500 (0:00:00.043) 0:01:40.198 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 15 February 2025 11:44:59 -0500 (0:00:00.056) 0:01:40.255 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 15 February 2025 11:44:59 -0500 (0:00:00.077) 0:01:40.332 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.119) 0:01:40.452 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.056) 0:01:40.509 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.041) 0:01:40.550 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.084) 0:01:40.635 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.046) 0:01:40.681 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.038) 0:01:40.720 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.071) 0:01:40.792 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.042) 0:01:40.834 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.040) 0:01:40.875 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.121) 0:01:40.997 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.065) 0:01:41.062 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.058) 0:01:41.121 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.063) 0:01:41.184 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.046) 0:01:41.231 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.045) 0:01:41.276 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.042) 0:01:41.318 
***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 15 February 2025 11:45:00 -0500 (0:00:00.041) 0:01:41.360 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.036) 0:01:41.397 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.035) 0:01:41.433 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.032) 0:01:41.466 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.031) 0:01:41.498 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.109) 0:01:41.607 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.078) 0:01:41.685 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": 
"", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.053) 0:01:41.739 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.043) 0:01:41.783 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.068) 0:01:41.851 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.074) 0:01:41.926 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.043) 0:01:41.969 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.046) 0:01:42.016 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:45:01 -0500 (0:00:00.050) 0:01:42.067 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, 
"nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:45:02 -0500 (0:00:00.414) 0:01:42.481 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004272", "end": "2025-02-15 11:45:02.438305", "rc": 0, "start": "2025-02-15 11:45:02.434033" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:45:02 -0500 (0:00:00.421) 0:01:42.903 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006492", "end": "2025-02-15 11:45:02.861964", "rc": 0, "start": "2025-02-15 11:45:02.855472" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.504) 0:01:43.407 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.059) 0:01:43.466 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.043) 0:01:43.509 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.043) 0:01:43.552 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.047) 0:01:43.600 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.040) 0:01:43.641 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.039) 0:01:43.680 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.063) 0:01:43.744 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.045) 0:01:43.790 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.040) 0:01:43.830 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.127) 0:01:43.958 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.075) 0:01:44.034 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:45:03 -0500 (0:00:00.149) 0:01:44.183 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:45:04 -0500 (0:00:00.441) 0:01:44.625 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:45:04 -0500 (0:00:00.054) 0:01:44.679 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:45:04 -0500 (0:00:00.423) 0:01:45.103 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:45:04 -0500 (0:00:00.042) 0:01:45.145 ***** [WARNING]: sftp transfer mechanism failed on [10.31.13.76]. 
Use ANSIBLE_DEBUG=1 to see detailed information ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:06 -0500 (0:00:01.807) 0:01:46.953 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:45:06 -0500 (0:00:00.043) 0:01:46.997 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:45:06 -0500 (0:00:00.056) 0:01:47.053 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:06 -0500 (0:00:00.073) 0:01:47.127 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:45:06 -0500 (0:00:00.042) 0:01:47.170 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:45:06 -0500 (0:00:00.043) 0:01:47.214 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:45:06 -0500 (0:00:00.084) 0:01:47.299 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:45:06 -0500 (0:00:00.039) 0:01:47.338 ***** skipping: [managed-node3] => { "changed": false, "false_condition": 
"__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:45:06 -0500 (0:00:00.038) 0:01:47.376 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.046) 0:01:47.423 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.039) 0:01:47.463 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.039) 0:01:47.503 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.039) 0:01:47.542 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.043) 0:01:47.585 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.039) 0:01:47.624 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.039) 0:01:47.664 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.038) 0:01:47.702 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.038) 0:01:47.741 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.047) 0:01:47.788 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.047) 0:01:47.835 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.037) 0:01:47.873 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.057) 0:01:47.931 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.068) 0:01:47.999 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.043) 0:01:48.042 ***** skipping: [managed-node3] => { "changed": false, 
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.094) 0:01:48.136 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:45:07 -0500 (0:00:00.052) 0:01:48.189 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:45:08 -0500 (0:00:00.412) 0:01:48.601 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004897", "end": "2025-02-15 11:45:08.547183", "rc": 0, "start": "2025-02-15 11:45:08.542286" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:45:08 -0500 (0:00:00.411) 0:01:49.013 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006418", "end": "2025-02-15 11:45:08.961749", "rc": 0, "start": "2025-02-15 11:45:08.955331" } STDOUT: 0: user_quadlet_pod 720896 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.418) 0:01:49.432 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 720896 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.056) 0:01:49.488 ***** skipping: [managed-node3] => { "changed": false, "false_condition": 
"not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.039) 0:01:49.528 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.038) 0:01:49.566 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.042) 0:01:49.609 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.039) 0:01:49.648 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.038) 0:01:49.687 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.060) 0:01:49.747 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.042) 0:01:49.790 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:45:09 -0500 
(0:00:00.034) 0:01:49.825 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.082) 0:01:49.907 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.045) 0:01:49.953 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.124) 0:01:50.078 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:45:10 -0500 (0:00:00.458) 0:01:50.536 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:45:10 -0500 (0:00:00.062) 0:01:50.599 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:45:10 -0500 (0:00:00.422) 0:01:51.021 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:45:10 -0500 (0:00:00.043) 0:01:51.065 ***** ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.467) 0:01:51.533 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" } TASK 
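The rootless pod quadlet handled above reduces to a single Pod section with PodName quadlet-pod, written to /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod. Expressed as role input, a plausible podman_quadlet_specs entry for this cleanup pass would look roughly as follows; the top-level key layout (name, type, state, run_as_user) is assumed from the role's documented interface, while the values are the ones visible in the log:

  podman_quadlet_specs:
    - name: quadlet-pod-pod
      type: pod
      run_as_user: user_quadlet_pod
      state: absent
      Pod:
        PodName: quadlet-pod
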
[fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.067) 0:01:51.600 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.072) 0:01:51.672 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.083) 0:01:51.756 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.059) 0:01:51.816 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.066) 0:01:51.882 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.120) 0:01:52.003 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.061) 0:01:52.064 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.065) 0:01:52.129 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.070) 0:01:52.200 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.047) 0:01:52.248 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.045) 0:01:52.294 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.044) 0:01:52.339 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.056) 0:01:52.395 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.039) 0:01:52.434 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.038) 0:01:52.473 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.037) 0:01:52.510 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.039) 0:01:52.549 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node3 => (item=user_quadlet_pod) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.093) 0:01:52.643 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.045) 0:01:52.688 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.098) 0:01:52.787 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.406) 0:01:53.193 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.042) 0:01:53.235 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.040) 0:01:53.276 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.046) 0:01:53.322 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.034) 0:01:53.357 ***** skipping: [managed-node3] => { 
"changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Saturday 15 February 2025 11:45:13 -0500 (0:00:00.043) 0:01:53.400 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Saturday 15 February 2025 11:45:13 -0500 (0:00:00.035) 0:01:53.436 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Saturday 15 February 2025 11:45:13 -0500 (0:00:00.038) 0:01:53.475 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:45:13 -0500 (0:00:00.035) 0:01:53.510 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 February 2025 11:45:13 -0500 (0:00:00.037) 0:01:53.547 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Remove test user] ******************************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:168 Saturday 15 February 2025 11:45:13 -0500 (0:00:00.088) 0:01:53.636 ***** changed: [managed-node3] => { "changed": true, "force": false, "name": "user_quadlet_pod", "remove": false, "state": "absent" } TASK [Cleanup system - root] *************************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:173 Saturday 15 February 2025 11:45:13 -0500 (0:00:00.618) 0:01:54.254 ***** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 15 February 2025 11:45:14 -0500 (0:00:00.146) 0:01:54.401 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 15 
February 2025 11:45:14 -0500 (0:00:00.074) 0:01:54.475 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 15 February 2025 11:45:14 -0500 (0:00:00.046) 0:01:54.522 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 15 February 2025 11:45:14 -0500 (0:00:00.040) 0:01:54.562 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 15 February 2025 11:45:14 -0500 (0:00:00.037) 0:01:54.599 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 15 February 2025 11:45:14 -0500 (0:00:00.036) 0:01:54.636 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 15 February 2025 11:45:14 -0500 (0:00:00.039) 0:01:54.676 ***** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node3] => (item=Fedora.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=Fedora_41.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_41.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 15 February 
2025 11:45:14 -0500 (0:00:00.115) 0:01:54.791 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 15 February 2025 11:45:15 -0500 (0:00:00.991) 0:01:55.782 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 15 February 2025 11:45:15 -0500 (0:00:00.046) 0:01:55.829 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 15 February 2025 11:45:15 -0500 (0:00:00.045) 0:01:55.874 ***** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 15 February 2025 11:45:15 -0500 (0:00:00.035) 0:01:55.910 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 15 February 2025 11:45:15 -0500 (0:00:00.035) 0:01:55.945 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 15 February 2025 11:45:15 -0500 (0:00:00.040) 0:01:55.986 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.030287", "end": "2025-02-15 11:45:15.949528", "rc": 0, "start": "2025-02-15 11:45:15.919241" } STDOUT: podman version 5.3.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 15 February 2025 11:45:16 -0500 (0:00:00.448) 0:01:56.434 ***** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 15 February 2025 11:45:16 -0500 (0:00:00.042) 0:01:56.477 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK 
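The 4.2 gate above skips because podman_version is 5.3.2. The pattern visible in the false_condition is the standard Jinja version() test; a minimal sketch of such a gate (illustrative, not necessarily the role's exact task) is:

  - name: Podman package version must be 4.2 or later
    ansible.builtin.fail:
      msg: "podman 4.2 or later is required; found {{ podman_version }}"
    when: podman_version is version("4.2", "<")
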
[fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 15 February 2025 11:45:16 -0500 (0:00:00.034) 0:01:56.512 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 15 February 2025 11:45:16 -0500 (0:00:00.046) 0:01:56.558 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 15 February 2025 11:45:16 -0500 (0:00:00.046) 0:01:56.604 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 15 February 2025 11:45:16 -0500 (0:00:00.058) 0:01:56.663 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 15 February 2025 11:45:16 -0500 (0:00:00.084) 0:01:56.748 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:45:16 -0500 (0:00:00.119) 0:01:56.868 ***** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:45:16 -0500 (0:00:00.497) 0:01:57.365 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.043) 0:01:57.409 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.075) 0:01:57.484 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.426) 0:01:57.911 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.055) 0:01:57.966 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.063) 0:01:58.029 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.046) 0:01:58.076 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.043) 0:01:58.120 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.048) 0:01:58.169 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.041) 0:01:58.210 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.037) 0:01:58.248 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.036) 0:01:58.285 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.050) 0:01:58.336 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.068) 0:01:58.405 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.042) 0:01:58.447 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.065) 
0:01:58.513 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.096) 0:01:58.610 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.043) 0:01:58.654 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.046) 0:01:58.701 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.075) 0:01:58.776 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.038) 0:01:58.815 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.039) 0:01:58.854 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.071) 0:01:58.925 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.041) 0:01:58.967 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.061) 0:01:59.029 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.057) 0:01:59.086 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.042) 0:01:59.128 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.041) 0:01:59.170 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.048) 0:01:59.218 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.040) 0:01:59.259 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.032) 0:01:59.291 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.032) 0:01:59.324 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 15 February 2025 
11:45:18 -0500 (0:00:00.036) 0:01:59.360 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.032) 0:01:59.392 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.122) 0:01:59.514 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.082) 0:01:59.596 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.053) 0:01:59.650 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.045) 0:01:59.696 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.065) 0:01:59.761 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 
11:45:19 -0500 (0:00:00.134) 0:01:59.896 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.045) 0:01:59.941 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.043) 0:01:59.985 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.078) 0:02:00.063 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.426) 0:02:00.490 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.045) 0:02:00.536 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.038) 0:02:00.574 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK 
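
The subuid/subgid tasks above are skipped because `__podman_user` is root. For a rootless user the role would instead query `getsubids` (whose presence it just verified at /usr/bin/getsubids). A minimal sketch of the equivalent manual check, assuming a hypothetical account named `poduser`:

# Show the subordinate UID and GID ranges available to a rootless podman user.
# "poduser" is a placeholder; substitute a real account name.
getsubids poduser        # subuid ranges, e.g. "0: poduser 524288 65536"
getsubids -g poduser     # subgid ranges
# Fallback if getsubids is not installed:
grep -w poduser /etc/subuid /etc/subgid
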
[fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.042) 0:02:00.617 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.038) 0:02:00.655 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.038) 0:02:00.693 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.038) 0:02:00.732 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.042) 0:02:00.775 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.038) 0:02:00.813 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.062) 0:02:00.876 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.039) 0:02:00.916 ***** skipping: [managed-node3] => { "changed": false, 
"false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.038) 0:02:00.955 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.080) 0:02:01.036 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.043) 0:02:01.080 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.086) 0:02:01.166 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.036) 0:02:01.203 ***** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-02-15 11:43:40 EST", "ActiveEnterTimestampMonotonic": "1010152823", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "quadlet-pod-pod-pod.service -.mount system.slice sysinit.target basic.target systemd-journald.socket network-online.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-02-15 11:43:39 EST", "AssertTimestampMonotonic": "1009934597", "Before": "shutdown.target multi-user.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "100594000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable 
cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-02-15 11:43:39 EST", "ConditionTimestampMonotonic": "1009934593", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-pod-container.service", "ControlGroupId": "12618", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3893923840", "EffectiveMemoryMax": "3893923840", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "4414", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "66926", "ExecMainStartTimestamp": "Sat 2025-02-15 11:43:40 EST", "ExecMainStartTimestampMonotonic": "1010152741", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Sat 2025-02-15 11:43:39 EST] ; stop_time=[n/a] ; pid=66915 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Sat 2025-02-15 11:43:39 EST] ; stop_time=[n/a] ; pid=66915 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-02-15 11:43:39 EST", "InactiveExitTimestampMonotonic": "1009946161", "InvocationID": "775bf27baf4548b6b2762dd31d2ebe7b", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14714", "LimitNPROCSoft": "14714", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14714", "LimitSIGPENDINGSoft": "14714", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "66926", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3458621440", "MemoryCurrent": "892928", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "17666048", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": 
"replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-02-15 11:43:40 EST", "StateChangeTimestampMonotonic": "1010152823", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "4414", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target quadlet-pod-pod-pod.service", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:45:32 -0500 (0:00:11.257) 0:02:12.460 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637820.6745653, 
"attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1739637818.3965838, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 278940, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1739637818.0825863, "nlink": 1, "path": "/etc/containers/systemd/quadlet-pod-container.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 0, "version": "3809355391", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:45:32 -0500 (0:00:00.415) 0:02:12.876 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 15 February 2025 11:45:32 -0500 (0:00:00.070) 0:02:12.946 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 15 February 2025 11:45:32 -0500 (0:00:00.396) 0:02:13.343 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 15 February 2025 11:45:33 -0500 (0:00:00.055) 0:02:13.399 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 15 February 2025 11:45:33 -0500 (0:00:00.040) 0:02:13.439 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:45:33 -0500 (0:00:00.037) 0:02:13.477 ***** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 
February 2025 11:45:33 -0500 (0:00:00.408) 0:02:13.886 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:45:34 -0500 (0:00:00.839) 0:02:14.725 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:45:34 -0500 (0:00:00.508) 0:02:15.234 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:34 -0500 (0:00:00.085) 0:02:15.320 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:45:34 -0500 (0:00:00.061) 0:02:15.381 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.064373", "end": "2025-02-15 11:45:35.399991", "rc": 0, "start": "2025-02-15 11:45:35.335618" } STDOUT: 8ef1ba6479f3fe87041b9f276362ded980e0141933314d7df324f2e170651411 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:45:35 -0500 (0:00:00.484) 0:02:15.865 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:45:35 -0500 (0:00:00.089) 0:02:15.955 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:45:35 -0500 (0:00:00.036) 0:02:15.992 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:45:35 -0500 (0:00:00.040) 0:02:16.032 ***** skipping: 
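
The cleanup recorded above (stop the generated service, remove the quadlet file, refresh systemd, prune now-unused images) corresponds roughly to the following manual sequence on the managed node. This is a sketch for orientation only, not the role's actual implementation; it omits the role's extra bookkeeping such as linger handling and the no_log resource removal:

# Approximate manual equivalent of the role's quadlet container cleanup, as root.
systemctl stop quadlet-pod-container.service
rm -f /etc/containers/systemd/quadlet-pod-container.container
systemctl daemon-reload          # regenerates units; the generated quadlet service disappears
podman image prune --all -f      # same prune command the role ran above
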
[managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:45:35 -0500 (0:00:00.037) 0:02:16.070 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.033679", "end": "2025-02-15 11:45:36.052819", "rc": 0, "start": "2025-02-15 11:45:36.019140" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:45:36 -0500 (0:00:00.451) 0:02:16.521 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.035036", "end": "2025-02-15 11:45:36.499191", "rc": 0, "start": "2025-02-15 11:45:36.464155" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:45:36 -0500 (0:00:00.445) 0:02:16.967 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.032829", "end": "2025-02-15 11:45:36.945396", "rc": 0, "start": "2025-02-15 11:45:36.912567" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:45:37 -0500 (0:00:00.445) 0:02:17.412 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.036661", "end": "2025-02-15 11:45:37.395731", "rc": 0, "start": "2025-02-15 11:45:37.359070" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:45:37 -0500 (0:00:00.513) 0:02:17.926 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:45:37 -0500 (0:00:00.447) 0:02:18.373 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:45:38 -0500 (0:00:00.440) 0:02:18.813 ***** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, 
"dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf5-makecache.service": { "name": "dnf5-makecache.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction-cleanup.service": { "name": "dnf5-offline-transaction-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction.service": { "name": "dnf5-offline-transaction.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": 
"fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": 
"systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "inactive", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-bsod.service": { "name": "systemd-bsod.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed-firstboot.service": { "name": "systemd-homed-firstboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-mountfsd.service": { "name": "systemd-mountfsd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-persistent-storage.service": { "name": "systemd-networkd-persistent-storage.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-nsresourced.service": { "name": "systemd-nsresourced.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-storagetm.service": { "name": "systemd-storagetm.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", 
"status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": 
"udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:41 -0500 (0:00:02.812) 0:02:21.626 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:45:41 -0500 (0:00:00.037) 0:02:21.663 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:45:41 -0500 (0:00:00.048) 0:02:21.711 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:45:41 -0500 (0:00:00.045) 0:02:21.756 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:41 -0500 (0:00:00.048) 0:02:21.805 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:45:41 -0500 (0:00:00.055) 0:02:21.860 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:45:41 -0500 (0:00:00.067) 0:02:21.928 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:45:41 -0500 (0:00:00.045) 0:02:21.974 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:45:41 -0500 (0:00:00.040) 0:02:22.014 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:45:41 -0500 (0:00:00.049) 0:02:22.063 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637487.0243824, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "0c228ad086513530aab958732f1fb01238bc39b0", "ctime": 1739637436.1118102, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 192287, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1728518400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "305858810", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.410) 0:02:22.474 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.042) 0:02:22.517 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.038) 0:02:22.556 ***** skipping: [managed-node3] => { 
"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.038) 0:02:22.594 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.037) 0:02:22.632 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.041) 0:02:22.674 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.037) 0:02:22.711 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.037) 0:02:22.749 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.039) 0:02:22.788 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.072) 0:02:22.860 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:45:42 
-0500 (0:00:00.039) 0:02:22.900 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.034) 0:02:22.935 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.079) 0:02:23.014 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.048) 0:02:23.062 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.082) 0:02:23.144 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:45:42 -0500 (0:00:00.036) 0:02:23.181 ***** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-pod-pod.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket system.slice network-online.target basic.target -.mount sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct 
cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3893923840", "EffectiveMemoryMax": "3893923840", "EffectiveTasksMax": "4414", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14714", "LimitNPROCSoft": "14714", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14714", "LimitSIGPENDINGSoft": "14714", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3477962752", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", 
"PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4414", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:45:43 -0500 (0:00:00.866) 0:02:24.047 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637821.0425625, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "ctime": 
1739637811.3316405, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 278886, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1739637810.8846443, "nlink": 1, "path": "/etc/containers/systemd/quadlet-pod-pod.pod", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 70, "uid": 0, "version": "992959968", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:45:44 -0500 (0:00:00.417) 0:02:24.465 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 15 February 2025 11:45:44 -0500 (0:00:00.065) 0:02:24.530 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 15 February 2025 11:45:44 -0500 (0:00:00.399) 0:02:24.929 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 15 February 2025 11:45:44 -0500 (0:00:00.054) 0:02:24.984 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 15 February 2025 11:45:44 -0500 (0:00:00.036) 0:02:25.021 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:45:44 -0500 (0:00:00.035) 0:02:25.057 ***** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:45 -0500 (0:00:00.415) 0:02:25.472 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] 
************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:45:45 -0500 (0:00:00.809) 0:02:26.282 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:45:46 -0500 (0:00:00.463) 0:02:26.746 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:46 -0500 (0:00:00.048) 0:02:26.795 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:45:46 -0500 (0:00:00.099) 0:02:26.894 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.033943", "end": "2025-02-15 11:45:46.872008", "rc": 0, "start": "2025-02-15 11:45:46.838065" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:45:46 -0500 (0:00:00.444) 0:02:27.339 ***** included: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:45:47 -0500 (0:00:00.065) 0:02:27.405 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:45:47 -0500 (0:00:00.039) 0:02:27.444 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:45:47 -0500 (0:00:00.036) 0:02:27.480 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 
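For reference, the cleanup sequence recorded above (stop and disable quadlet-pod-pod-pod.service, remove /etc/containers/systemd/quadlet-pod-pod.pod, refresh systemd, then prune unused images) is what the role performs when the quadlet spec is requested with state "absent". A minimal sketch of such an invocation follows, assuming the role's documented podman_quadlet_specs variable layout and reusing only names that appear in this log; the actual test playbook may differ:

    # Illustrative sketch only; the variable layout is assumed, not copied from the test playbook.
    - hosts: all
      tasks:
        - name: Remove the quadlet pod via the podman role
          ansible.builtin.include_role:
            name: fedora.linux_system_roles.podman
          vars:
            podman_prune_images: true          # corresponds to the "podman image prune --all -f" step above
            podman_quadlet_specs:
              - name: quadlet-pod-pod          # quadlet file: /etc/containers/systemd/quadlet-pod-pod.pod
                type: pod
                state: absent
                Pod:
                  PodName: quadlet-pod         # generated unit: quadlet-pod-pod-pod.service

The quadlet file removed above would have contained little more than a [Pod] section with PodName=quadlet-pod; the pod create/start/stop/rm commands visible in the ExecStartPre/ExecStart/ExecStop/ExecStopPost properties earlier are produced from that file by the quadlet systemd generator, not written by the role itself.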
Saturday 15 February 2025 11:45:47 -0500 (0:00:00.035) 0:02:27.515 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.035214", "end": "2025-02-15 11:45:47.487762", "rc": 0, "start": "2025-02-15 11:45:47.452548" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:45:47 -0500 (0:00:00.439) 0:02:27.955 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.035867", "end": "2025-02-15 11:45:47.932000", "rc": 0, "start": "2025-02-15 11:45:47.896133" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:45:48 -0500 (0:00:00.449) 0:02:28.404 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.034137", "end": "2025-02-15 11:45:48.381945", "rc": 0, "start": "2025-02-15 11:45:48.347808" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:45:48 -0500 (0:00:00.444) 0:02:28.849 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.034907", "end": "2025-02-15 11:45:48.836451", "rc": 0, "start": "2025-02-15 11:45:48.801544" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:45:48 -0500 (0:00:00.454) 0:02:29.303 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:45:49 -0500 (0:00:00.440) 0:02:29.743 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:45:49 -0500 (0:00:00.449) 0:02:30.193 ***** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", 
"state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": 
"dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf5-makecache.service": { "name": "dnf5-makecache.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction-cleanup.service": { "name": "dnf5-offline-transaction-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf5-offline-transaction.service": { "name": "dnf5-offline-transaction.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { 
"name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", 
"status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { 
"name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": 
"stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-bsod.service": { "name": "systemd-bsod.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed-firstboot.service": { "name": "systemd-homed-firstboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": 
{ "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-mountfsd.service": { "name": "systemd-mountfsd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-persistent-storage.service": { "name": "systemd-networkd-persistent-storage.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-nsresourced.service": { "name": "systemd-nsresourced.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-storagetm.service": { "name": "systemd-storagetm.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", 
"source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:52 -0500 (0:00:02.814) 0:02:33.007 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 15 February 2025 11:45:52 -0500 (0:00:00.035) 0:02:33.043 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:45:52 -0500 (0:00:00.031) 0:02:33.075 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 February 2025 11:45:52 -0500 (0:00:00.036) 0:02:33.111 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:182 Saturday 15 February 2025 11:45:52 -0500 (0:00:00.052) 0:02:33.163 ***** fatal: [managed-node3]: FAILED! => { "assertion": "__podman_test_debug_secrets.stdout == \"\"", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Dump journal] ************************************************************ task path: /tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:194 Saturday 15 February 2025 11:45:52 -0500 (0:00:00.047) 0:02:33.211 ***** fatal: [managed-node3]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.111721", "end": "2025-02-15 11:45:53.253450", "failed_when_result": true, "rc": 0, "start": "2025-02-15 11:45:53.141729" } STDOUT: Feb 15 11:45:32 managed-node3 audit[86532]: CRYPTO_KEY_USER pid=86532 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86532 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:32 managed-node3 audit[86556]: CRYPTO_KEY_USER pid=86556 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86556 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:32 managed-node3 audit[86582]: CRYPTO_KEY_USER pid=86582 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86582 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:32 managed-node3 audit[86609]: CRYPTO_KEY_USER pid=86609 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86609 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:32 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[86634]: CRYPTO_KEY_USER pid=86634 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86634 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[86658]: CRYPTO_KEY_USER pid=86658 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86658 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[86688]: CRYPTO_KEY_USER pid=86688 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86688 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[86712]: CRYPTO_KEY_USER pid=86712 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86712 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:33 managed-node3 audit[86737]: CRYPTO_KEY_USER pid=86737 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86737 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:33 managed-node3 python3[86763]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-pod-container.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[86764]: CRYPTO_KEY_USER pid=86764 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86764 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[86789]: CRYPTO_KEY_USER pid=86789 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86789 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[86813]: CRYPTO_KEY_USER pid=86813 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86813 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[86843]: CRYPTO_KEY_USER pid=86843 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86843 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[86867]: CRYPTO_KEY_USER pid=86867 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86867 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:33 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:33 managed-node3 audit[86892]: CRYPTO_KEY_USER pid=86892 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86892 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:33 managed-node3 python3[86918]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:45:33 managed-node3 systemd[1]: Reload requested from client PID 86919 ('systemctl') (unit session-11.scope)... Feb 15 11:45:33 managed-node3 systemd[1]: Reloading... Feb 15 11:45:34 managed-node3 systemd[1]: Reloading finished in 244 ms. 
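The ansible-systemd invocation recorded just above (daemon_reload=True, scope=system) is a plain daemon-reload after the quadlet unit file was removed. A minimal sketch of an equivalent task, assuming only the task name; the two parameters are taken directly from the journal entry, and this is an illustration rather than the role's actual task file:

- name: Reload systemd units after removing the quadlet file
  ansible.builtin.systemd:
    daemon_reload: true
    scope: system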
Feb 15 11:45:34 managed-node3 audit: BPF prog-id=266 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=253 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=267 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=268 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=246 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=247 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=269 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=245 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=270 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=248 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=271 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=257 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=272 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=273 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=258 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=259 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=274 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=250 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=275 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=276 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=251 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=252 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=277 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=249 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=278 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=260 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=279 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=280 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=261 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=262 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=281 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=263 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=282 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=283 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=264 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=265 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=284 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=254 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=285 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=286 op=LOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=255 op=UNLOAD Feb 15 11:45:34 managed-node3 audit: BPF prog-id=256 op=UNLOAD Feb 15 11:45:34 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[86972]: CRYPTO_KEY_USER pid=86972 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86972 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[86998]: CRYPTO_KEY_USER pid=86998 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=86998 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[87022]: CRYPTO_KEY_USER pid=87022 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87022 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[87052]: CRYPTO_KEY_USER pid=87052 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87052 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[87076]: CRYPTO_KEY_USER pid=87076 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87076 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:34 managed-node3 audit[87101]: CRYPTO_KEY_USER pid=87101 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87101 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:34 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:34 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[87135]: CRYPTO_KEY_USER pid=87135 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87135 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:34 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[87160]: CRYPTO_KEY_USER pid=87160 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87160 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[87184]: CRYPTO_KEY_USER pid=87184 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87184 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[87214]: CRYPTO_KEY_USER pid=87214 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87214 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[87238]: CRYPTO_KEY_USER pid=87238 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87238 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:35 managed-node3 audit[87263]: CRYPTO_KEY_USER pid=87263 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87263 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:35 managed-node3 python3[87289]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:35 managed-node3 podman[87290]: 2025-02-15 11:45:35.378767388 -0500 EST m=+0.033642134 image untag 8ef1ba6479f3fe87041b9f276362ded980e0141933314d7df324f2e170651411 localhost/podman-pause:5.3.2-1737504000 Feb 15 11:45:35 managed-node3 podman[87290]: 2025-02-15 11:45:35.367254399 -0500 EST m=+0.022129275 image remove 8ef1ba6479f3fe87041b9f276362ded980e0141933314d7df324f2e170651411 Feb 15 11:45:35 managed-node3 podman[87290]: 2025-02-15 11:45:35.395364238 -0500 EST m=+0.050238997 image untag 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Feb 15 11:45:35 managed-node3 podman[87290]: 2025-02-15 11:45:35.378775717 -0500 EST m=+0.033650459 image remove 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f Feb 15 11:45:35 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
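(Editor's note: the journal entries just above record the test's image-cleanup step: "podman image prune --all -f" untags and removes the two leftover images from the run, the locally built podman-pause image and quay.io/libpod/testimage:20210610, after which the overlay storage mount is released. As an illustrative sketch only, assuming a root shell on the managed node, the equivalent manual check would be the same commands the test issues through ansible.legacy.command:

    podman image prune --all -f   # remove every unused image without prompting
    podman images -n              # list remaining images with no heading row; expect empty output
)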
Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[87298]: CRYPTO_KEY_USER pid=87298 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87298 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[87323]: CRYPTO_KEY_USER pid=87323 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87323 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[87347]: CRYPTO_KEY_USER pid=87347 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87347 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[87377]: CRYPTO_KEY_USER pid=87377 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87377 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[87401]: CRYPTO_KEY_USER pid=87401 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87401 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:35 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:35 managed-node3 audit[87426]: CRYPTO_KEY_USER pid=87426 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87426 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:36 managed-node3 python3[87452]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:36 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87460]: CRYPTO_KEY_USER pid=87460 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87460 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87485]: CRYPTO_KEY_USER pid=87485 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87485 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87509]: CRYPTO_KEY_USER pid=87509 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87509 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87539]: CRYPTO_KEY_USER pid=87539 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87539 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87563]: CRYPTO_KEY_USER pid=87563 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87563 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[87588]: CRYPTO_KEY_USER pid=87588 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? 
spid=87588 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:36 managed-node3 python3[87614]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:36 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87623]: CRYPTO_KEY_USER pid=87623 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87623 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87648]: CRYPTO_KEY_USER pid=87648 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87648 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87672]: CRYPTO_KEY_USER pid=87672 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87672 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87702]: CRYPTO_KEY_USER pid=87702 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87702 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87726]: CRYPTO_KEY_USER pid=87726 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87726 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[87751]: CRYPTO_KEY_USER pid=87751 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87751 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:36 managed-node3 python3[87777]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[87785]: CRYPTO_KEY_USER pid=87785 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87785 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:36 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[87810]: CRYPTO_KEY_USER pid=87810 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87810 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[87834]: CRYPTO_KEY_USER pid=87834 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87834 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[87864]: CRYPTO_KEY_USER pid=87864 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87864 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[87888]: CRYPTO_KEY_USER pid=87888 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87888 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:37 managed-node3 audit[87913]: CRYPTO_KEY_USER pid=87913 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87913 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:37 managed-node3 python3[87939]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:37 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[87948]: CRYPTO_KEY_USER pid=87948 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87948 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[87973]: CRYPTO_KEY_USER pid=87973 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87973 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[87997]: CRYPTO_KEY_USER pid=87997 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=87997 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[88027]: CRYPTO_KEY_USER pid=88027 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88027 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[88051]: CRYPTO_KEY_USER pid=88051 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88051 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:37 managed-node3 audit[88076]: CRYPTO_KEY_USER pid=88076 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88076 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:37 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? 
terminal=/dev/pts/0 res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[88111]: CRYPTO_KEY_USER pid=88111 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88111 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:37 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[88136]: CRYPTO_KEY_USER pid=88136 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88136 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[88160]: CRYPTO_KEY_USER pid=88160 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88160 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[88190]: CRYPTO_KEY_USER pid=88190 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88190 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[88214]: CRYPTO_KEY_USER pid=88214 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88214 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:38 managed-node3 audit[88239]: CRYPTO_KEY_USER pid=88239 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88239 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:38 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[88274]: CRYPTO_KEY_USER pid=88274 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88274 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[88299]: CRYPTO_KEY_USER pid=88299 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88299 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[88323]: CRYPTO_KEY_USER pid=88323 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88323 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[88353]: CRYPTO_KEY_USER pid=88353 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88353 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[88377]: CRYPTO_KEY_USER pid=88377 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88377 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:38 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:38 managed-node3 audit[88402]: CRYPTO_KEY_USER pid=88402 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88402 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:38 managed-node3 python3[88428]: ansible-service_facts Invoked Feb 15 11:45:39 managed-node3 systemd[1]: /usr/lib/systemd/system/lvm-devices-import.service:8: Unknown key 'ConditionPathExists' in section [Service], ignoring. 
Feb 15 11:45:41 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[88566]: CRYPTO_KEY_USER pid=88566 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88566 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[88591]: CRYPTO_KEY_USER pid=88591 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88591 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[88615]: CRYPTO_KEY_USER pid=88615 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88615 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[88645]: CRYPTO_KEY_USER pid=88645 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88645 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[88669]: CRYPTO_KEY_USER pid=88669 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88669 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:41 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:41 managed-node3 audit[88694]: CRYPTO_KEY_USER pid=88694 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88694 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:41 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Feb 15 11:45:41 managed-node3 audit[1]: SERVICE_STOP pid=1 uid=0 auid=4294967295 ses=4294967295 subj=system_u:system_r:init_t:s0 msg='unit=NetworkManager-dispatcher comm="systemd" exe="/usr/lib/systemd/systemd" hostname=? addr=? terminal=? res=success' Feb 15 11:45:42 managed-node3 python3[88721]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:45:42 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[88724]: CRYPTO_KEY_USER pid=88724 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88724 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[88749]: CRYPTO_KEY_USER pid=88749 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88749 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[88773]: CRYPTO_KEY_USER pid=88773 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88773 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[88803]: CRYPTO_KEY_USER pid=88803 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88803 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[88827]: CRYPTO_KEY_USER pid=88827 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88827 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:42 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:42 managed-node3 audit[88852]: CRYPTO_KEY_USER pid=88852 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? 
spid=88852 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:43 managed-node3 python3[88878]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Feb 15 11:45:43 managed-node3 systemd[1]: Reload requested from client PID 88881 ('systemctl') (unit session-11.scope)... Feb 15 11:45:43 managed-node3 systemd[1]: Reloading... Feb 15 11:45:43 managed-node3 systemd[1]: Reloading finished in 232 ms. Feb 15 11:45:43 managed-node3 audit: BPF prog-id=287 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=269 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=288 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=266 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=289 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=277 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=290 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=271 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=291 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=292 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=272 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=273 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=293 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=294 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=267 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=268 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=295 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=270 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=296 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=274 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=297 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=298 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=275 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=276 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=299 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=278 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=300 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=301 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=279 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=280 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=302 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=284 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=303 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=304 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=285 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=286 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=305 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=281 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=306 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=307 op=LOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=282 op=UNLOAD Feb 15 11:45:43 managed-node3 audit: BPF prog-id=283 op=UNLOAD Feb 15 11:45:43 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? 
terminal=/dev/pts/0 res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[88934]: CRYPTO_KEY_USER pid=88934 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88934 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[88959]: CRYPTO_KEY_USER pid=88959 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88959 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[88983]: CRYPTO_KEY_USER pid=88983 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=88983 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[89013]: CRYPTO_KEY_USER pid=89013 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89013 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[89037]: CRYPTO_KEY_USER pid=89037 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89037 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:43 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:43 managed-node3 audit[89062]: CRYPTO_KEY_USER pid=89062 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89062 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:43 managed-node3 python3[89088]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89091]: CRYPTO_KEY_USER pid=89091 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89091 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89116]: CRYPTO_KEY_USER pid=89116 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89116 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89140]: CRYPTO_KEY_USER pid=89140 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89140 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89170]: CRYPTO_KEY_USER pid=89170 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89170 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89194]: CRYPTO_KEY_USER pid=89194 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89194 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:44 managed-node3 audit[89219]: CRYPTO_KEY_USER pid=89219 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89219 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89246]: CRYPTO_KEY_USER pid=89246 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89246 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89271]: CRYPTO_KEY_USER pid=89271 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89271 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89295]: CRYPTO_KEY_USER pid=89295 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89295 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89325]: CRYPTO_KEY_USER pid=89325 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89325 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[89349]: CRYPTO_KEY_USER pid=89349 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89349 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:44 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:44 managed-node3 audit[89375]: CRYPTO_KEY_USER pid=89375 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89375 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:45 managed-node3 python3[89401]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[89402]: CRYPTO_KEY_USER pid=89402 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89402 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[89427]: CRYPTO_KEY_USER pid=89427 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89427 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[89451]: CRYPTO_KEY_USER pid=89451 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89451 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[89481]: CRYPTO_KEY_USER pid=89481 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89481 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[89505]: CRYPTO_KEY_USER pid=89505 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89505 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:45 managed-node3 audit[89530]: CRYPTO_KEY_USER pid=89530 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89530 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:45 managed-node3 python3[89556]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:45:45 managed-node3 systemd[1]: Reload requested from client PID 89557 ('systemctl') (unit session-11.scope)... Feb 15 11:45:45 managed-node3 systemd[1]: Reloading... Feb 15 11:45:45 managed-node3 systemd[1]: Reloading finished in 234 ms. Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? 
terminal=/dev/pts/0 res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[89610]: CRYPTO_KEY_USER pid=89610 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89610 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[89635]: CRYPTO_KEY_USER pid=89635 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89635 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[89659]: CRYPTO_KEY_USER pid=89659 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89659 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:45 managed-node3 audit[89689]: CRYPTO_KEY_USER pid=89689 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89689 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[89713]: CRYPTO_KEY_USER pid=89713 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89713 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:46 managed-node3 audit[89738]: CRYPTO_KEY_USER pid=89738 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89738 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:46 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:46 managed-node3 audit: BPF prog-id=308 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=299 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=309 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=310 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=300 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=301 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=311 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=296 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=312 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=313 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=297 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=298 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=314 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=290 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=315 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=316 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=291 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=292 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=317 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=289 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=318 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=305 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=319 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=320 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=306 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=307 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=321 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=288 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=322 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=295 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=323 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=287 op=UNLOAD Feb 15 11:45:46 managed-node3 
audit: BPF prog-id=324 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=325 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=293 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=294 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=326 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=302 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=327 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=328 op=LOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=303 op=UNLOAD Feb 15 11:45:46 managed-node3 audit: BPF prog-id=304 op=UNLOAD Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[89772]: CRYPTO_KEY_USER pid=89772 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89772 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[89797]: CRYPTO_KEY_USER pid=89797 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89797 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[89821]: CRYPTO_KEY_USER pid=89821 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89821 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[89851]: CRYPTO_KEY_USER pid=89851 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89851 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[89875]: CRYPTO_KEY_USER pid=89875 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89875 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:46 managed-node3 audit[89900]: CRYPTO_KEY_USER pid=89900 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89900 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:46 managed-node3 python3[89926]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:46 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[89934]: CRYPTO_KEY_USER pid=89934 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89934 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:46 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[89959]: CRYPTO_KEY_USER pid=89959 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89959 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[89983]: CRYPTO_KEY_USER pid=89983 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=89983 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[90013]: CRYPTO_KEY_USER pid=90013 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90013 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[90037]: CRYPTO_KEY_USER pid=90037 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90037 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:47 managed-node3 audit[90062]: CRYPTO_KEY_USER pid=90062 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90062 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:47 managed-node3 python3[90088]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:47 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[90096]: CRYPTO_KEY_USER pid=90096 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90096 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[90121]: CRYPTO_KEY_USER pid=90121 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90121 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[90145]: CRYPTO_KEY_USER pid=90145 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90145 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[90175]: CRYPTO_KEY_USER pid=90175 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90175 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[90199]: CRYPTO_KEY_USER pid=90199 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90199 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:47 managed-node3 audit[90224]: CRYPTO_KEY_USER pid=90224 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90224 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:47 managed-node3 python3[90250]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:47 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? 
terminal=/dev/pts/0 res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[90258]: CRYPTO_KEY_USER pid=90258 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90258 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:47 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90283]: CRYPTO_KEY_USER pid=90283 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90283 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90307]: CRYPTO_KEY_USER pid=90307 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90307 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90337]: CRYPTO_KEY_USER pid=90337 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90337 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90361]: CRYPTO_KEY_USER pid=90361 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90361 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:48 managed-node3 audit[90386]: CRYPTO_KEY_USER pid=90386 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90386 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:48 managed-node3 python3[90412]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90421]: CRYPTO_KEY_USER pid=90421 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90421 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90446]: CRYPTO_KEY_USER pid=90446 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90446 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90470]: CRYPTO_KEY_USER pid=90470 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90470 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90500]: CRYPTO_KEY_USER pid=90500 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90500 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90524]: CRYPTO_KEY_USER pid=90524 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90524 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:48 managed-node3 audit[90549]: CRYPTO_KEY_USER pid=90549 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? 
spid=90549 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:48 managed-node3 python3[90575]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:48 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90584]: CRYPTO_KEY_USER pid=90584 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90584 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90609]: CRYPTO_KEY_USER pid=90609 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90609 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:48 managed-node3 audit[90633]: CRYPTO_KEY_USER pid=90633 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90633 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90663]: CRYPTO_KEY_USER pid=90663 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90663 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90687]: CRYPTO_KEY_USER pid=90687 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90687 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[90712]: CRYPTO_KEY_USER pid=90712 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90712 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:49 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90746]: CRYPTO_KEY_USER pid=90746 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90746 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90771]: CRYPTO_KEY_USER pid=90771 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90771 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90795]: CRYPTO_KEY_USER pid=90795 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90795 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90825]: CRYPTO_KEY_USER pid=90825 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90825 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90849]: CRYPTO_KEY_USER pid=90849 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90849 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[90874]: CRYPTO_KEY_USER pid=90874 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90874 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:49 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90909]: CRYPTO_KEY_USER pid=90909 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90909 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90934]: CRYPTO_KEY_USER pid=90934 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90934 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? 
res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90958]: CRYPTO_KEY_USER pid=90958 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90958 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[90988]: CRYPTO_KEY_USER pid=90988 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=90988 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[91012]: CRYPTO_KEY_USER pid=91012 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=91012 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:49 managed-node3 audit[91037]: CRYPTO_KEY_USER pid=91037 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=91037 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success' Feb 15 11:45:50 managed-node3 python3[91063]: ansible-service_facts Invoked Feb 15 11:45:51 managed-node3 systemd[1]: /usr/lib/systemd/system/lvm-devices-import.service:8: Unknown key 'ConditionPathExists' in section [Service], ignoring. Feb 15 11:45:52 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[91201]: CRYPTO_KEY_USER pid=91201 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=91201 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[91226]: CRYPTO_KEY_USER pid=91226 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=91226 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[91250]: CRYPTO_KEY_USER pid=91250 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=91250 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[91280]: CRYPTO_KEY_USER pid=91280 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=91280 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[91304]: CRYPTO_KEY_USER pid=91304 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=91304 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=? res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_END pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGOUT pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=ssh res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_LOGIN pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.45.73 terminal=/dev/pts/0 res=success' Feb 15 11:45:52 managed-node3 audit[83862]: USER_START pid=83862 uid=0 auid=0 ses=11 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? 
addr=10.31.45.73 terminal=/dev/pts/0 res=success'
Feb 15 11:45:52 managed-node3 audit[91329]: CRYPTO_KEY_USER pid=91329 uid=0 auid=0 ses=11 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:a7:1a:b8:89:56:ce:0d:71:da:47:38:92:0c:ec:c4:17:4b:e8:f9:ef:6b:5f:50:de:63:4f:25:dd:47:51:ef:b8 direction=? spid=91329 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node3 addr=10.31.45.73 terminal=pts/0 res=success'
Feb 15 11:45:53 managed-node3 python3[91355]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node3              : ok=371  changed=28   unreachable=0    failed=1    skipped=391  rescued=1    ignored=1

TASKS RECAP ********************************************************************
Saturday 15 February 2025 11:45:53 -0500 (0:00:00.506)       0:02:33.717 *****
===============================================================================
fedora.linux_system_roles.podman : Stop and disable service ------------ 11.26s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Stop and disable service ------------ 11.18s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.83s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.81s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.81s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.63s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Remove quadlet file ------------------ 1.81s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
fedora.linux_system_roles.podman : Ensure container images are present --- 1.63s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 1.57s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Start service ------------------------ 1.54s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Gathering Facts --------------------------------------------------------- 1.49s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.32s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Start service ------------------------ 1.27s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.03s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Reload systemctl --------------------- 1.03s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.01s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.99s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.96s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.92s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Check files ------------------------------------------------------------- 0.89s
/tmp/collections-xcg/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:70