ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-BPh
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_pod.yml ************************************************
2 plays in /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:5
Saturday 11 January 2025 11:33:30 -0500 (0:00:00.007) 0:00:00.007 ******
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-9hc/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Ensure that the role can manage quadlet pods] ****************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
Saturday 11 January 2025 11:33:30 -0500 (0:00:00.021) 0:00:00.028 ******
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
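For orientation before the task-by-task output below, here is a hedged reconstruction of the input that drives this test run. It is not the actual tests_quadlet_pod.yml source: the Pod, Container, and Install sections are copied from the per-container variables echoed later in this log, while the surrounding layout (the podman_quadlet_specs variable, the name and type keys, and the include_role wiring) is an assumption based on how the fedora.linux_system_roles.podman role is normally invoked.

- name: Ensure that the role can manage quadlet pods
  hosts: all
  tasks:
    - name: Run the role - root
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.podman
      vars:
        podman_quadlet_specs:
          # First spec: the pod itself (echoed later as __podman_quadlet_spec)
          - name: quadlet-pod-pod
            type: pod
            Pod:
              PodName: quadlet-pod
          # Second spec: a container that joins that pod
          - name: quadlet-pod-container
            type: container
            Container:
              ContainerName: quadlet-pod-container
              Image: quay.io/libpod/testimage:20210610
              Exec: /bin/busybox-extras httpd -f -p 80
              Pod: quadlet-pod-pod.pod
            Install:
              WantedBy: default.target

As the output below shows, the role renders the first entry to /etc/containers/systemd/quadlet-pod-pod.pod and starts the generated quadlet-pod-pod-pod.service unit.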
ok: [managed-node3] TASK [Run the role - root] ***************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:34 Saturday 11 January 2025 11:33:31 -0500 (0:00:01.196) 0:00:01.225 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 11 January 2025 11:33:32 -0500 (0:00:00.062) 0:00:01.288 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 11 January 2025 11:33:32 -0500 (0:00:00.023) 0:00:01.311 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 11 January 2025 11:33:32 -0500 (0:00:00.033) 0:00:01.345 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 11 January 2025 11:33:32 -0500 (0:00:00.448) 0:00:01.793 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 11 January 2025 11:33:32 -0500 (0:00:00.026) 0:00:01.819 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 11 January 2025 11:33:32 -0500 (0:00:00.352) 0:00:02.171 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 11 January 2025 11:33:32 -0500 (0:00:00.024) 0:00:02.196 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", 
"podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 11 January 2025 11:33:32 -0500 (0:00:00.050) 0:00:02.246 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 11 January 2025 11:33:34 -0500 (0:00:01.098) 0:00:03.345 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.043) 0:00:03.389 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.047) 0:00:03.436 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.042) 0:00:03.479 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.045) 0:00:03.524 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.042) 0:00:03.567 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.023658", "end": "2025-01-11 11:33:34.727631", "rc": 0, "start": "2025-01-11 11:33:34.703973" } STDOUT: podman version 5.3.1 
TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.481) 0:00:04.048 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.033) 0:00:04.082 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.030) 0:00:04.113 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.047) 0:00:04.160 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 11 January 2025 11:33:34 -0500 (0:00:00.050) 0:00:04.211 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 11 January 2025 11:33:35 -0500 (0:00:00.045) 0:00:04.256 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 11 January 2025 11:33:35 -0500 (0:00:00.049) 0:00:04.305 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:35 -0500 (0:00:00.111) 0:00:04.416 ****** ok: 
[managed-node3] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:35 -0500 (0:00:00.465) 0:00:04.882 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:35 -0500 (0:00:00.038) 0:00:04.920 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:35 -0500 (0:00:00.046) 0:00:04.966 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1736612953.2121177, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612945.376056, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.376) 0:00:05.343 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.036) 0:00:05.379 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.029) 0:00:05.408 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.029) 0:00:05.437 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.028) 0:00:05.466 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.029) 0:00:05.495 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.029) 0:00:05.525 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.031) 0:00:05.556 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.029) 0:00:05.585 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.074) 0:00:05.659 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.077) 0:00:05.737 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } 
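The containers.conf.d, registries.conf.d, storage.conf, and policy.json handling around this point is skipped because the corresponding role variables are empty; each block only runs when its variable has length > 0. A hedged sketch of what non-empty input might look like, targeting the files named in the "Set config file paths" task above. Every key and value below is hypothetical; the real schema should be taken from the role's README rather than from this log.

podman_containers_conf:   # hypothetical; would be written to /etc/containers/containers.conf.d/50-systemroles.conf
  containers:
    log_level: debug
podman_registries_conf:   # hypothetical; would be written to /etc/containers/registries.conf.d/50-systemroles.conf
  registry:
    - location: quay.io
podman_storage_conf:      # hypothetical; would be merged into /etc/containers/storage.conf
  storage:
    runroot: /run/containers/storage
podman_policy_json:       # hypothetical; would be written to /etc/containers/policy.json
  default:
    - type: insecureAcceptAnything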
TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.032) 0:00:05.769 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.030) 0:00:05.799 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.063) 0:00:05.863 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.057) 0:00:05.921 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.028) 0:00:05.950 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.053) 0:00:06.003 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.028) 0:00:06.032 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.027) 0:00:06.060 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.079) 0:00:06.139 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.042) 0:00:06.182 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 11 January 2025 11:33:36 -0500 (0:00:00.033) 0:00:06.216 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.035) 0:00:06.252 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.033) 0:00:06.286 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.033) 0:00:06.319 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.029) 0:00:06.349 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.027) 0:00:06.376 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.023) 0:00:06.400 ****** skipping: [managed-node3] => { "censored": 
"the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.024) 0:00:06.424 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.023) 0:00:06.448 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.023) 0:00:06.471 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.083) 0:00:06.555 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.099) 0:00:06.654 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.056) 0:00:06.711 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.046) 0:00:06.757 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.073) 0:00:06.830 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.093) 0:00:06.923 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.037) 0:00:06.961 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.035) 0:00:06.996 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:37 -0500 (0:00:00.042) 0:00:07.038 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1736612953.2121177, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612945.376056, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.370) 0:00:07.408 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.030) 0:00:07.439 ****** skipping: [managed-node3] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.028) 0:00:07.467 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.029) 0:00:07.497 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.027) 0:00:07.524 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.032) 0:00:07.556 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.029) 0:00:07.586 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.028) 0:00:07.615 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.028) 0:00:07.643 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 
January 2025 11:33:38 -0500 (0:00:00.047) 0:00:07.691 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.037) 0:00:07.728 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.028) 0:00:07.757 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.069) 0:00:07.826 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.064) 0:00:07.891 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.026) 0:00:07.918 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.063) 0:00:07.981 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.049) 0:00:08.031 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.028) 0:00:08.060 ****** skipping: [managed-node3] => { "changed": false, "false_condition": 
"__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.027) 0:00:08.087 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.027) 0:00:08.115 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.023) 0:00:08.138 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 11 January 2025 11:33:38 -0500 (0:00:00.027) 0:00:08.166 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:39 -0500 (0:00:00.466) 0:00:08.632 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 11 January 2025 11:33:39 -0500 (0:00:00.029) 0:00:08.661 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 11 January 2025 11:33:39 -0500 (0:00:00.027) 0:00:08.689 ****** changed: [managed-node3] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/etc/containers/systemd/quadlet-pod-pod.pod", "gid": 0, "group": "root", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1736613219.4797897-17447-31959509434284/.source.pod", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.870) 0:00:09.559 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 11 January 2025 11:33:41 -0500 (0:00:01.015) 0:00:10.575 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice systemd-journald.socket network-online.target sysinit.target basic.target -.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod 
create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": 
"13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3277721600", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": 
"[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 11 January 2025 11:33:42 -0500 (0:00:01.209) 0:00:11.785 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.045) 0:00:11.830 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.048) 0:00:11.878 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.046) 0:00:11.925 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:42 -0500 
(0:00:00.029) 0:00:11.955 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.046) 0:00:12.002 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.099) 0:00:12.101 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.041) 0:00:12.143 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.054) 0:00:12.198 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.050) 0:00:12.248 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1736612953.2121177, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612945.376056, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.474) 0:00:12.722 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman 
: Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.031) 0:00:12.754 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.031) 0:00:12.785 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.038) 0:00:12.824 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.030) 0:00:12.854 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.032) 0:00:12.886 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.038) 0:00:12.925 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.049) 0:00:12.975 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.049) 0:00:13.025 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": 
"system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.070) 0:00:13.095 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.036) 0:00:13.131 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.034) 0:00:13.166 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.081) 0:00:13.248 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:33:44 -0500 (0:00:00.034) 0:00:13.283 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:33:44 -0500 (0:00:00.027) 0:00:13.310 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 11 January 2025 11:33:44 -0500 (0:00:00.092) 0:00:13.403 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:33:44 -0500 (0:00:00.053) 0:00:13.456 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:33:44 -0500 (0:00:00.033) 0:00:13.489 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:33:44 -0500 (0:00:00.032) 0:00:13.522 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 11 January 2025 11:33:44 -0500 (0:00:00.029) 0:00:13.552 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 11 January 2025 11:33:44 -0500 (0:00:00.030) 0:00:13.583 ****** changed: [managed-node3] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 11 January 2025 11:33:47 -0500 (0:00:03.181) 0:00:16.764 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 33, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:47 -0500 (0:00:00.388) 0:00:17.153 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 11 January 2025 11:33:47 -0500 (0:00:00.028) 0:00:17.181 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 11 January 2025 11:33:47 -0500 (0:00:00.028) 0:00:17.210 ****** changed: [managed-node3] 
=> { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": "/etc/containers/systemd/quadlet-pod-container.container", "gid": 0, "group": "root", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1736613228.00213-17713-248510374954123/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 11 January 2025 11:33:48 -0500 (0:00:00.691) 0:00:17.902 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 11 January 2025 11:33:49 -0500 (0:00:00.737) 0:00:18.640 ****** fatal: [managed-node3]: FAILED! => { "changed": false } MSG: Unable to start service quadlet-pod-container.service: A dependency job for quadlet-pod-container.service failed. See 'journalctl -xe' for details. TASK [Debug3] ****************************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:127 Saturday 11 January 2025 11:33:49 -0500 (0:00:00.533) 0:00:19.173 ****** ok: [managed-node3] => { "changed": false, "cmd": "set -x\nset -o pipefail\nexec 1>&2\n#podman volume rm --all\n#podman network prune -f\npodman volume ls\npodman network ls\npodman secret ls\npodman container ls\npodman pod ls\npodman images\nsystemctl list-units | grep quadlet\nsystemctl list-unit-files | grep quadlet\nls -alrtF /etc/containers/systemd\n/usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log\n", "delta": "0:00:00.542815", "end": "2025-01-11 11:33:50.754615", "rc": 0, "start": "2025-01-11 11:33:50.211800" } STDERR: + set -o pipefail + exec + podman volume ls + podman network ls NETWORK ID NAME DRIVER 2f259bab93aa podman bridge + podman secret ls ID NAME DRIVER CREATED UPDATED 3bc8b6bb0019144f58c9e5a9d mysql_container_root_password file 22 seconds ago 22 seconds ago + podman container ls CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + podman pod ls POD ID NAME STATUS CREATED INFRA ID # OF CONTAINERS + podman images REPOSITORY TAG IMAGE ID CREATED SIZE localhost/podman-pause 5.3.1-1733097600 46a99829a23f 8 seconds ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB + systemctl list-units + grep quadlet ● quadlet-pod-pod-pod.service loaded failed failed quadlet-pod-pod-pod.service + systemctl list-unit-files + grep quadlet quadlet-pod-container.service generated - quadlet-pod-pod-pod.service generated - + ls -alrtF /etc/containers/systemd total 8 drwxr-xr-x. 9 root root 175 Jan 11 11:30 ../ -rw-r--r--. 1 root root 70 Jan 11 11:33 quadlet-pod-pod.pod -rw-r--r--. 1 root root 230 Jan 11 11:33 quadlet-pod-container.container drwxr-xr-x. 
2 root root 72 Jan 11 11:33 ./ + /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log quadlet-generator[60293]: Loading source unit file /etc/containers/systemd/quadlet-pod-container.container quadlet-generator[60293]: Loading source unit file /etc/containers/systemd/quadlet-pod-pod.pod ---quadlet-pod-container.service--- # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [X-Container] Image=quay.io/libpod/testimage:20210610 ContainerName=quadlet-pod-container Pod=quadlet-pod-pod.pod Exec=/bin/busybox-extras httpd -f -p 80 [Unit] Wants=network-online.target After=network-online.target SourcePath=/etc/containers/systemd/quadlet-pod-container.container RequiresMountsFor=%t/containers BindsTo=quadlet-pod-pod-pod.service After=quadlet-pod-pod-pod.service [Service] Environment=PODMAN_SYSTEMD_UNIT=%n KillMode=mixed ExecStop=/usr/bin/podman rm -v -f -i --cidfile=%t/%N.cid ExecStopPost=-/usr/bin/podman rm -v -f -i --cidfile=%t/%N.cid Delegate=yes Type=notify NotifyAccess=all SyslogIdentifier=%N ExecStart=/usr/bin/podman run --name quadlet-pod-container --cidfile=%t/%N.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file %t/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ---quadlet-pod-pod-pod.service--- # # Ansible managed # # system_role:podman [X-Pod] PodName=quadlet-pod [Unit] Wants=network-online.target After=network-online.target SourcePath=/etc/containers/systemd/quadlet-pod-pod.pod RequiresMountsFor=%t/containers Wants=quadlet-pod-container.service Before=quadlet-pod-container.service [Service] SyslogIdentifier=%N ExecStart=/usr/bin/podman pod start --pod-id-file=%t/%N.pod-id ExecStop=/usr/bin/podman pod stop --pod-id-file=%t/%N.pod-id --ignore --time=10 ExecStopPost=/usr/bin/podman pod rm --pod-id-file=%t/%N.pod-id --ignore --force ExecStartPre=/usr/bin/podman pod create --infra-conmon-pidfile=%t/%N.pid --pod-id-file=%t/%N.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod Environment=PODMAN_SYSTEMD_UNIT=%n Type=forking Restart=on-failure PIDFile=%t/%N.pid TASK [Check AVCs] ************************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:146 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.897) 0:00:20.071 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "grep", "type=AVC", "/var/log/audit/audit.log" ], "delta": "0:00:00.005206", "end": "2025-01-11 11:33:51.129474", "failed_when_result": false, "rc": 0, "start": "2025-01-11 11:33:51.124268" } STDOUT: type=AVC msg=audit(1736612949.028:603): avc: denied { read } for pid=7754 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0 type=AVC msg=audit(1736613057.569:5062): avc: denied { read } for pid=27559 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0 type=AVC msg=audit(1736613065.911:5267): avc: denied { read } for pid=28427 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0 type=AVC msg=audit(1736613068.020:5388): avc: denied { read } for pid=29007 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 
tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0 type=AVC msg=audit(1736613116.999:7346): avc: denied { read } for pid=37554 comm="docker-entrypoi" path="/lib/x86_64-linux-gnu/libtinfo.so.5.9" dev="xvda2" ino=436208115 scontext=system_u:system_r:container_t:s0:c340,c389 tcontext=unconfined_u:object_r:data_home_t:s0 tclass=file permissive=0 type=AVC msg=audit(1736613116.999:7347): avc: denied { read } for pid=37554 comm="docker-entrypoi" path="/lib/x86_64-linux-gnu/libdl-2.24.so" dev="xvda2" ino=436207859 scontext=system_u:system_r:container_t:s0:c340,c389 tcontext=unconfined_u:object_r:data_home_t:s0 tclass=file permissive=0 type=AVC msg=audit(1736613116.999:7348): avc: denied { read } for pid=37554 comm="docker-entrypoi" path="/lib/x86_64-linux-gnu/libc-2.24.so" dev="xvda2" ino=436207849 scontext=system_u:system_r:container_t:s0:c340,c389 tcontext=unconfined_u:object_r:data_home_t:s0 tclass=file permissive=0 type=AVC msg=audit(1736613116.999:7349): avc: denied { read } for pid=37554 comm="docker-entrypoi" path="/lib/x86_64-linux-gnu/libc-2.24.so" dev="xvda2" ino=436207849 scontext=system_u:system_r:container_t:s0:c340,c389 tcontext=unconfined_u:object_r:data_home_t:s0 tclass=file permissive=0 type=AVC msg=audit(1736613221.121:12293): avc: denied { read } for pid=58596 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0 type=AVC msg=audit(1736613222.385:12353): avc: denied { transition } for pid=58836 comm="3" path="/catatonit" dev="overlay" ino=616562906 scontext=system_u:system_r:unconfined_service_t:s0 tcontext=system_u:system_r:container_t:s0:c323,c775 tclass=process permissive=0 type=AVC msg=audit(1736613223.096:12381): avc: denied { transition } for pid=58961 comm="3" path="/catatonit" dev="overlay" ino=616562906 scontext=system_u:system_r:unconfined_service_t:s0 tcontext=system_u:system_r:container_t:s0:c28,c378 tclass=process permissive=0 type=AVC msg=audit(1736613223.803:12412): avc: denied { transition } for pid=59146 comm="3" path="/catatonit" dev="overlay" ino=616562906 scontext=system_u:system_r:unconfined_service_t:s0 tcontext=system_u:system_r:container_t:s0:c606,c841 tclass=process permissive=0 type=AVC msg=audit(1736613224.577:12436): avc: denied { transition } for pid=59264 comm="3" path="/catatonit" dev="overlay" ino=616562906 scontext=system_u:system_r:unconfined_service_t:s0 tcontext=system_u:system_r:container_t:s0:c198,c617 tclass=process permissive=0 type=AVC msg=audit(1736613225.566:12457): avc: denied { transition } for pid=59416 comm="3" path="/catatonit" dev="overlay" ino=616562906 scontext=system_u:system_r:unconfined_service_t:s0 tcontext=system_u:system_r:container_t:s0:c209,c526 tclass=process permissive=0 type=AVC msg=audit(1736613229.201:12580): avc: denied { read } for pid=60020 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0 TASK [Dump journal] ************************************************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:151 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.392) 0:00:20.464 ****** fatal: [managed-node3]: FAILED! 
=> { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.032272", "end": "2025-01-11 11:33:51.562200", "failed_when_result": true, "rc": 0, "start": "2025-01-11 11:33:51.529928" } STDOUT: Jan 11 11:30:39 managed-node3 python3.12[25281]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613038.3102617-11397-183252814259992/.source.container dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:41 managed-node3 python3.12[25543]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:42 managed-node3 python3.12[25680]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:44 managed-node3 python3.12[25813]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:46 managed-node3 systemd[1]: var-lib-containers-storage-overlay-compat3372456078-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat3372456078-merged.mount has successfully entered the 'dead' state. Jan 11 11:30:46 managed-node3 kernel: evm: overlay not supported Jan 11 11:30:46 managed-node3 podman[25947]: 2025-01-11 11:30:46.877429908 -0500 EST m=+0.066758337 system refresh Jan 11 11:30:47 managed-node3 podman[25957]: 2025-01-11 11:30:47.094679943 -0500 EST m=+0.112971453 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY Jan 11 11:30:47 managed-node3 python3.12[26095]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:47 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 11 11:30:48 managed-node3 python3.12[26226]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:30:48 managed-node3 python3.12[26331]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613047.7212226-11731-256101618506701/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:50 managed-node3 python3.12[26593]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:51 managed-node3 python3.12[26730]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:53 managed-node3 python3.12[26863]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:55 managed-node3 python3.12[26996]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:30:55 managed-node3 python3.12[27128]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:56 managed-node3 python3.12[27392]: ansible-file Invoked with path=/etc/containers/systemd/nopull.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:57 managed-node3 python3.12[27523]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:30:57 managed-node3 systemd[1]: Reload requested from client PID 27524 ('systemctl') (unit session-5.scope)... Jan 11 11:30:57 managed-node3 systemd[1]: Reloading... Jan 11 11:30:57 managed-node3 systemd-ssh-generator[27559]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:30:57 managed-node3 (sd-exec-[27542]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:30:57 managed-node3 systemd-rc-local-generator[27557]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:30:57 managed-node3 systemd[1]: Reloading finished in 197 ms. Jan 11 11:30:58 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 11 11:31:01 managed-node3 python3.12[27976]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:02 managed-node3 python3.12[28113]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:04 managed-node3 python3.12[28246]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:05 managed-node3 python3.12[28379]: ansible-systemd Invoked with name=bogus.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:31:05 managed-node3 systemd[1]: Reload requested from client PID 28382 ('systemctl') (unit session-5.scope)... Jan 11 11:31:05 managed-node3 systemd[1]: Reloading... Jan 11 11:31:05 managed-node3 quadlet-generator[27546]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details. Jan 11 11:31:05 managed-node3 quadlet-generator[28404]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details. Jan 11 11:31:05 managed-node3 systemd-ssh-generator[28427]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:05 managed-node3 systemd-rc-local-generator[28425]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:05 managed-node3 (sd-exec-[28400]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:06 managed-node3 systemd[1]: Reloading finished in 196 ms. Jan 11 11:31:06 managed-node3 python3.12[28566]: ansible-stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:07 managed-node3 python3.12[28830]: ansible-file Invoked with path=/etc/containers/systemd/bogus.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:07 managed-node3 python3.12[28961]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:07 managed-node3 systemd[1]: Reload requested from client PID 28962 ('systemctl') (unit session-5.scope)... Jan 11 11:31:07 managed-node3 systemd[1]: Reloading... Jan 11 11:31:08 managed-node3 systemd-rc-local-generator[29005]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:08 managed-node3 systemd-ssh-generator[29007]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:08 managed-node3 (sd-exec-[28980]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. 
Jan 11 11:31:08 managed-node3 systemd[1]: Reloading finished in 193 ms. Jan 11 11:31:08 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:09 managed-node3 python3.12[29283]: ansible-user Invoked with name=user_quadlet_basic uid=1111 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jan 11 11:31:09 managed-node3 useradd[29285]: new group: name=user_quadlet_basic, GID=1111 Jan 11 11:31:09 managed-node3 useradd[29285]: new user: name=user_quadlet_basic, UID=1111, GID=1111, home=/home/user_quadlet_basic, shell=/bin/bash, from=/dev/pts/0 Jan 11 11:31:12 managed-node3 python3.12[29547]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:13 managed-node3 python3.12[29684]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:15 managed-node3 python3.12[29817]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None Jan 11 11:31:16 managed-node3 python3.12[29949]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:16 managed-node3 systemd[1]: Created slice user-1111.slice - User Slice of UID 1111. ░░ Subject: A start job for unit user-1111.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-1111.slice has finished successfully. ░░ ░░ The job identifier is 1546. Jan 11 11:31:16 managed-node3 systemd[1]: Starting user-runtime-dir@1111.service - User Runtime Directory /run/user/1111... ░░ Subject: A start job for unit user-runtime-dir@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@1111.service has begun execution. ░░ ░░ The job identifier is 1545. Jan 11 11:31:16 managed-node3 systemd[1]: Finished user-runtime-dir@1111.service - User Runtime Directory /run/user/1111. ░░ Subject: A start job for unit user-runtime-dir@1111.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@1111.service has finished successfully. ░░ ░░ The job identifier is 1545. 
Jan 11 11:31:16 managed-node3 systemd[1]: Starting user@1111.service - User Manager for UID 1111... ░░ Subject: A start job for unit user@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@1111.service has begun execution. ░░ ░░ The job identifier is 1625. Jan 11 11:31:16 managed-node3 systemd-logind[657]: New session 6 of user user_quadlet_basic. ░░ Subject: A new session 6 has been created for user user_quadlet_basic ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user user_quadlet_basic. ░░ ░░ The leading process of the session is 29953. Jan 11 11:31:16 managed-node3 (systemd)[29953]: pam_unix(systemd-user:session): session opened for user user_quadlet_basic(uid=1111) by user_quadlet_basic(uid=0) Jan 11 11:31:16 managed-node3 systemd[29953]: Queued start job for default target default.target. Jan 11 11:31:16 managed-node3 systemd[29953]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 11 11:31:16 managed-node3 systemd[29953]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 11 11:31:16 managed-node3 systemd[29953]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 11 11:31:16 managed-node3 systemd[29953]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jan 11 11:31:16 managed-node3 systemd[29953]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 11 11:31:16 managed-node3 systemd[29953]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 11 11:31:16 managed-node3 systemd[29953]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 11 11:31:16 managed-node3 systemd[29953]: Startup finished in 70ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 1111 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 70397 microseconds. Jan 11 11:31:16 managed-node3 systemd[1]: Started user@1111.service - User Manager for UID 1111. ░░ Subject: A start job for unit user@1111.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@1111.service has finished successfully. ░░ ░░ The job identifier is 1625. Jan 11 11:31:16 managed-node3 python3.12[30099]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:16 managed-node3 sudo[30274]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-avxipeoxrjhlkfrazvfkttsavzknrimr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613076.7151632-12877-15443294202840/AnsiballZ_podman_secret.py' Jan 11 11:31:16 managed-node3 sudo[30274]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:17 managed-node3 systemd[29953]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 11 11:31:17 managed-node3 systemd[29953]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. 
░░ ░░ The job identifier is 13. Jan 11 11:31:17 managed-node3 dbus-broker-launch[30306]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 11 11:31:17 managed-node3 dbus-broker-launch[30306]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 11 11:31:17 managed-node3 systemd[29953]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 11 11:31:17 managed-node3 dbus-broker-launch[30306]: Ready Jan 11 11:31:17 managed-node3 systemd[29953]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 11 11:31:17 managed-node3 systemd[29953]: Started podman-30290.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 11 11:31:17 managed-node3 systemd[29953]: Started podman-pause-60dc1641.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 11 11:31:17 managed-node3 systemd[29953]: Started podman-30308.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 11 11:31:17 managed-node3 systemd[29953]: Started podman-30316.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. Jan 11 11:31:18 managed-node3 sudo[30274]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:19 managed-node3 python3.12[30453]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:19 managed-node3 python3.12[30584]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:20 managed-node3 sudo[30759]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dwntgnrpngizjiznvufvyqulguydtjcy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613080.02285-12950-111514160688076/AnsiballZ_podman_secret.py' Jan 11 11:31:20 managed-node3 sudo[30759]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:20 managed-node3 systemd[29953]: Started podman-30769.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 11 11:31:20 managed-node3 systemd[29953]: Started podman-30777.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Jan 11 11:31:20 managed-node3 systemd[29953]: Started podman-30785.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 11 11:31:20 managed-node3 sudo[30759]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:21 managed-node3 python3.12[30922]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:22 managed-node3 python3.12[31055]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:22 managed-node3 python3.12[31187]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:23 managed-node3 python3.12[31319]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:24 managed-node3 python3.12[31450]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:24 managed-node3 python3.12[31581]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:24 managed-node3 python3.12[31686]: ansible-ansible.legacy.copy Invoked with dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network owner=user_quadlet_basic group=1111 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736613084.2410774-13105-219658197907052/.source.network _original_basename=.g3ne8830 follow=False checksum=19c9b17be2af9b9deca5c3bd327f048966750682 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:25 managed-node3 sudo[31859]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo 
BECOME-SUCCESS-judnrhcxiyrmxoqgnstgcbryyvbpxslz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613084.9635346-13143-15106682890183/AnsiballZ_systemd.py' Jan 11 11:31:25 managed-node3 sudo[31859]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:25 managed-node3 python3.12[31862]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:25 managed-node3 systemd[29953]: Reload requested from client PID 31863 ('systemctl')... Jan 11 11:31:25 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:25 managed-node3 systemd[29953]: Reloading finished in 37 ms. Jan 11 11:31:25 managed-node3 sudo[31859]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:25 managed-node3 sudo[32045]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tjkfapunhxorqvhumnjcumrorpkgfobz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613085.6133273-13171-166038780946792/AnsiballZ_systemd.py' Jan 11 11:31:25 managed-node3 sudo[32045]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:26 managed-node3 python3.12[32048]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:26 managed-node3 systemd[29953]: Starting podman-user-wait-network-online.service - Wait for system level network-online.target as user.... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 59. Jan 11 11:31:26 managed-node3 sh[32052]: active Jan 11 11:31:26 managed-node3 systemd[29953]: Finished podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 59. Jan 11 11:31:26 managed-node3 systemd[29953]: Starting quadlet-basic-network.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 47. Jan 11 11:31:26 managed-node3 quadlet-basic-network[32054]: quadlet-basic-name Jan 11 11:31:26 managed-node3 systemd[29953]: Finished quadlet-basic-network.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. 
Jan 11 11:31:26 managed-node3 sudo[32045]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:27 managed-node3 python3.12[32192]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:27 managed-node3 python3.12[32325]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:27 managed-node3 python3.12[32457]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:29 managed-node3 python3.12[32589]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:29 managed-node3 python3.12[32720]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:30 managed-node3 python3.12[32851]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:30 managed-node3 python3.12[32956]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613089.8482535-13338-83917486083226/.source.network dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:30 managed-node3 sudo[33129]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gieamnuaoahbegtqksokeqhcwvhytali ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613090.578231-13370-65639197134826/AnsiballZ_systemd.py' Jan 11 11:31:30 managed-node3 sudo[33129]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:31 managed-node3 python3.12[33132]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:31 managed-node3 systemd[29953]: Reload requested from client PID 33133 ('systemctl')... Jan 11 11:31:31 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:31 managed-node3 systemd[29953]: Reloading finished in 39 ms. 
Jan 11 11:31:31 managed-node3 sudo[33129]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:31 managed-node3 sudo[33315]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pvkaajvcijpqwmdbqpbrokublruligrd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613091.23687-13400-21248509799713/AnsiballZ_systemd.py' Jan 11 11:31:31 managed-node3 sudo[33315]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:31 managed-node3 python3.12[33318]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:31 managed-node3 systemd[29953]: Starting quadlet-basic-unused-network-network.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 60. Jan 11 11:31:31 managed-node3 quadlet-basic-unused-network-network[33321]: systemd-quadlet-basic-unused-network Jan 11 11:31:31 managed-node3 systemd[29953]: Finished quadlet-basic-unused-network-network.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 60. Jan 11 11:31:31 managed-node3 sudo[33315]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:32 managed-node3 python3.12[33459]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:33 managed-node3 python3.12[33592]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:33 managed-node3 python3.12[33724]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:34 managed-node3 python3.12[33856]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:35 managed-node3 python3.12[33987]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:35 managed-node3 python3.12[34118]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 
11:31:36 managed-node3 python3.12[34223]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613095.4446378-13576-269972147901892/.source.volume dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=90a3571bfc7670328fe3f8fb625585613dbd9c4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:36 managed-node3 sudo[34396]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-znmlewfejphqczptpxrtplngttyqifke ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613096.1590877-13607-158223923579391/AnsiballZ_systemd.py' Jan 11 11:31:36 managed-node3 sudo[34396]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:36 managed-node3 python3.12[34399]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:36 managed-node3 systemd[29953]: Reload requested from client PID 34400 ('systemctl')... Jan 11 11:31:36 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:36 managed-node3 systemd[29953]: Reloading finished in 39 ms. Jan 11 11:31:36 managed-node3 sudo[34396]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:37 managed-node3 sudo[34582]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cjnisejsqpmgphnpswpkgxbgdksyruii ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613096.8421135-13630-12722030320446/AnsiballZ_systemd.py' Jan 11 11:31:37 managed-node3 sudo[34582]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:37 managed-node3 python3.12[34585]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:37 managed-node3 systemd[29953]: Starting quadlet-basic-mysql-volume.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 73. Jan 11 11:31:37 managed-node3 quadlet-basic-mysql-volume[34588]: quadlet-basic-mysql-name Jan 11 11:31:37 managed-node3 systemd[29953]: Finished quadlet-basic-mysql-volume.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 73. 
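The quadlet-basic-mysql.volume unit copied and started above is likewise not logged verbatim. A minimal sketch that would yield the quadlet-basic-mysql-volume.service unit and the quadlet-basic-mysql-name volume seen in the log (the file body is an assumption; run as the user with XDG_RUNTIME_DIR set, as the sudo wrappers above do):

  printf '[Volume]\nVolumeName=quadlet-basic-mysql-name\n' \
      > ~/.config/containers/systemd/quadlet-basic-mysql.volume
  systemctl --user daemon-reload     # generates quadlet-basic-mysql-volume.service
  systemctl --user start quadlet-basic-mysql-volume.service
  podman volume ls                   # the named volume should now exist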
Jan 11 11:31:37 managed-node3 sudo[34582]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:38 managed-node3 python3.12[34727]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:38 managed-node3 python3.12[34860]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:39 managed-node3 python3.12[34992]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:40 managed-node3 python3.12[35124]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:41 managed-node3 python3.12[35255]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:41 managed-node3 python3.12[35386]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:42 managed-node3 python3.12[35491]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613101.373204-13827-106009179447385/.source.volume dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=fd0ae560360afa5541b866560b1e849d25e216ef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:42 managed-node3 sudo[35664]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gzbxfjidrwldagfpfjxfakunpdndbxml ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613102.1354904-13858-226367834609827/AnsiballZ_systemd.py' Jan 11 11:31:42 managed-node3 sudo[35664]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:42 managed-node3 python3.12[35667]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:42 managed-node3 systemd[29953]: Reload requested from client PID 35668 ('systemctl')... Jan 11 11:31:42 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:42 managed-node3 systemd[29953]: Reloading finished in 40 ms. 
Jan 11 11:31:42 managed-node3 sudo[35664]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:43 managed-node3 sudo[35850]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bifqattdlqdynwwkzwhyvfvjtsmxqjbb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613102.8123279-13888-16675578239037/AnsiballZ_systemd.py' Jan 11 11:31:43 managed-node3 sudo[35850]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:43 managed-node3 python3.12[35853]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:43 managed-node3 systemd[29953]: Starting quadlet-basic-unused-volume-volume.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 86. Jan 11 11:31:43 managed-node3 quadlet-basic-unused-volume-volume[35856]: systemd-quadlet-basic-unused-volume Jan 11 11:31:43 managed-node3 systemd[29953]: Finished quadlet-basic-unused-volume-volume.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Jan 11 11:31:43 managed-node3 sudo[35850]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:44 managed-node3 python3.12[35995]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:44 managed-node3 python3.12[36128]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:45 managed-node3 python3.12[36260]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:46 managed-node3 python3.12[36392]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:46 managed-node3 sudo[36565]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yejaoueywdzgyawvvauyidvsaoeljkuw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613106.406838-14031-178920898729166/AnsiballZ_podman_image.py' Jan 11 11:31:46 managed-node3 sudo[36565]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:46 managed-node3 systemd[29953]: Started podman-36569.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 99. 
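The AnsiballZ_podman_image.py invocation above runs podman under the user's manager to pull the container image into user_quadlet_basic's rootless storage; the pull itself shows up as the podman scopes that follow. The rough manual equivalent is below; the image reference is a hypothetical placeholder, since the real one is not visible in this excerpt.

  # docker.io/library/mysql:latest is hypothetical; the test's actual image is not logged here.
  sudo -u user_quadlet_basic /bin/sh -c \
      'XDG_RUNTIME_DIR=/run/user/1111 podman pull docker.io/library/mysql:latest'
  sudo -u user_quadlet_basic /bin/sh -c \
      'XDG_RUNTIME_DIR=/run/user/1111 podman images -n'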
Jan 11 11:31:46 managed-node3 systemd[29953]: Started podman-36577.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 103. Jan 11 11:31:53 managed-node3 systemd[29953]: podman-36577.scope: Consumed 8.710s CPU time, 472.3M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jan 11 11:31:53 managed-node3 systemd[29953]: Started podman-36750.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jan 11 11:31:54 managed-node3 sudo[36565]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:54 managed-node3 python3.12[36888]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:54 managed-node3 python3.12[37019]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:55 managed-node3 python3.12[37124]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613114.5874217-14235-248964580093201/.source.container dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=0b6cac7929623f1059e78ef39b8b0a25169b28a6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:55 managed-node3 sudo[37297]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qjigkbqqbsumpewtjyywkklizpdscvme ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613115.4036748-14257-2932215252036/AnsiballZ_systemd.py' Jan 11 11:31:55 managed-node3 sudo[37297]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:55 managed-node3 python3.12[37300]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:55 managed-node3 systemd[29953]: Reload requested from client PID 37301 ('systemctl')... Jan 11 11:31:55 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:55 managed-node3 systemd[29953]: Reloading finished in 42 ms. 
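The quadlet-basic-mysql.container file written above (checksum 0b6cac79...) defines the actual workload. Its contents are not logged, so the sketch below is an assumption built from standard quadlet [Container] keys, wiring in the network and volume units created earlier; the image reference and mount path are placeholders.

  printf '%s\n' \
      '[Container]' \
      'Image=docker.io/library/mysql:latest' \
      'Network=quadlet-basic.network' \
      'Volume=quadlet-basic-mysql.volume:/var/lib/mysql' \
      '[Install]' \
      'WantedBy=default.target' \
      > ~/.config/containers/systemd/quadlet-basic-mysql.container
  systemctl --user daemon-reload     # a NAME.container file generates NAME.service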
Jan 11 11:31:55 managed-node3 sudo[37297]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:56 managed-node3 sudo[37483]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jttwmojzbswtvzunudlkxxoaaojyxqds ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613116.0337892-14274-250502948281504/AnsiballZ_systemd.py' Jan 11 11:31:56 managed-node3 sudo[37483]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:56 managed-node3 python3.12[37486]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:56 managed-node3 systemd[29953]: Starting quadlet-basic-mysql.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 111. Jan 11 11:31:56 managed-node3 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 11 11:31:56 managed-node3 systemd[29953]: Started rootless-netns-e1dda47f.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 126. Jan 11 11:31:56 managed-node3 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 11 11:31:56 managed-node3 kernel: podman1: port 1(veth0) entered blocking state Jan 11 11:31:56 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:56 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:31:56 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:31:56 managed-node3 kernel: podman1: port 1(veth0) entered blocking state Jan 11 11:31:56 managed-node3 kernel: podman1: port 1(veth0) entered forwarding state Jan 11 11:31:56 managed-node3 systemd[29953]: Started run-p37546-i37846.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/1111/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 130. Jan 11 11:31:56 managed-node3 systemd[29953]: Started quadlet-basic-mysql.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 111. 
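Starting quadlet-basic-mysql.service above also pulls in the rootless networking plumbing: the per-user rootless-netns scope, the veth pair on podman1, and the aardvark-dns helper launched via systemd-run. When a quadlet-generated unit misbehaves, the generated unit text and its log are the first things worth checking, roughly:

  systemctl --user cat quadlet-basic-mysql.service      # show the unit quadlet generated
  systemctl --user status quadlet-basic-mysql.service
  journalctl --user -u quadlet-basic-mysql.service -e   # jump to the most recent entries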
Jan 11 11:31:57 managed-node3 quadlet-basic-mysql[37489]: ebf545f510b185bac07f58eac6edcfcdbaa1f2590184b5c102a917b7b2414ffc Jan 11 11:31:57 managed-node3 sudo[37483]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:57 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:57 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:31:57 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:31:57 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:57 managed-node3 systemd[29953]: quadlet-basic-mysql.service: Main process exited, code=exited, status=127/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit UNIT has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 127. Jan 11 11:31:57 managed-node3 systemd[29953]: quadlet-basic-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT has entered the 'failed' state with result 'exit-code'. Jan 11 11:31:57 managed-node3 python3.12[37713]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:58 managed-node3 python3.12[37845]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:00 managed-node3 python3.12[37977]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:00 managed-node3 python3.12[38109]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:02 managed-node3 python3.12[38373]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:03 managed-node3 python3.12[38510]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:04 managed-node3 python3.12[38643]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:04 managed-node3 python3.12[38775]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:07 
managed-node3 python3.12[38907]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:07 managed-node3 sudo[39082]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yvmxxhpzvvgqielvmbqcweqwdztzymmd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613127.4405448-14633-245979959966301/AnsiballZ_podman_secret.py' Jan 11 11:32:07 managed-node3 sudo[39082]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:07 managed-node3 systemd[29953]: Started podman-39086.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 134. Jan 11 11:32:07 managed-node3 sudo[39082]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:09 managed-node3 python3.12[39223]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:09 managed-node3 sudo[39398]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-csblizunbfvzmlfftxoqtdjcntsrnngp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613129.4278853-14724-51853748470117/AnsiballZ_podman_secret.py' Jan 11 11:32:09 managed-node3 sudo[39398]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:09 managed-node3 systemd[29953]: Started podman-39402.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 138. 
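The two AnsiballZ_podman_secret.py runs above manage podman secrets in user_quadlet_basic's rootless store; the secret names and values come from vaulted variables and never appear in the log. Done manually, the same kind of operation is roughly:

  # 'mysql_root_password' is a hypothetical secret name; the real one is not logged.
  printf '%s' "$SECRET_VALUE" | podman secret create mysql_root_password -
  podman secret ls
  # podman secret rm mysql_root_password    # removes it again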
Jan 11 11:32:09 managed-node3 sudo[39398]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:10 managed-node3 python3.12[39540]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:11 managed-node3 python3.12[39673]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:11 managed-node3 python3.12[39805]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:12 managed-node3 python3.12[39937]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:13 managed-node3 sudo[40112]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ocxduommqxitdwnbedqsszjqaqdntcaj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613132.9935164-14872-92584878593836/AnsiballZ_systemd.py' Jan 11 11:32:13 managed-node3 sudo[40112]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:13 managed-node3 python3.12[40115]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:32:13 managed-node3 systemd[29953]: Reload requested from client PID 40118 ('systemctl')... Jan 11 11:32:13 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:13 managed-node3 systemd[29953]: Reloading finished in 42 ms. 
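The stop request above, together with the file removal and daemon-reload that follow, is the per-unit teardown pattern the role repeats for every quadlet file. Condensed into manual steps, it amounts to roughly:

  systemctl --user stop quadlet-basic-mysql.service
  rm -f ~/.config/containers/systemd/quadlet-basic-mysql.container
  systemctl --user daemon-reload     # the generated quadlet-basic-mysql.service disappears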
Jan 11 11:32:13 managed-node3 sudo[40112]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:13 managed-node3 python3.12[40259]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:14 managed-node3 python3.12[40523]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:15 managed-node3 sudo[40696]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lzwfvxkfspvoznvkuqhvovftzcydlmgw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613134.9800293-14942-55593340973778/AnsiballZ_systemd.py' Jan 11 11:32:15 managed-node3 sudo[40696]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:15 managed-node3 python3.12[40699]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:15 managed-node3 systemd[29953]: Reload requested from client PID 40700 ('systemctl')... Jan 11 11:32:15 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:15 managed-node3 systemd[29953]: Reloading finished in 43 ms. Jan 11 11:32:15 managed-node3 sudo[40696]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:15 managed-node3 sudo[40882]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-knyeodcprzbiypreezsfqbknyxrhhbsa ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613135.6275008-14959-34319500053231/AnsiballZ_command.py' Jan 11 11:32:15 managed-node3 sudo[40882]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:16 managed-node3 systemd[29953]: Started podman-40886.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 142. Jan 11 11:32:16 managed-node3 sudo[40882]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:16 managed-node3 sudo[41065]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jytjagiplhuhrzswzwaosvwhotddftkt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613136.2272286-14978-110077178975768/AnsiballZ_command.py' Jan 11 11:32:16 managed-node3 sudo[41065]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:16 managed-node3 python3.12[41068]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:16 managed-node3 systemd[29953]: Started podman-41069.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 146. Jan 11 11:32:17 managed-node3 sudo[41065]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:17 managed-node3 sudo[41249]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hllzzhqpmtcjlnistruziuasggarrark ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613137.5180597-14999-25111524180527/AnsiballZ_command.py' Jan 11 11:32:17 managed-node3 sudo[41249]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:17 managed-node3 python3.12[41252]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:17 managed-node3 systemd[29953]: Started podman-41253.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 150. Jan 11 11:32:17 managed-node3 sudo[41249]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:18 managed-node3 sudo[41433]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qrrwcybanbyozqsysrdltneidcmfbrwg ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613138.006892-15009-274072191063702/AnsiballZ_command.py' Jan 11 11:32:18 managed-node3 sudo[41433]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:18 managed-node3 python3.12[41436]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:18 managed-node3 systemd[29953]: Started podman-41437.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 154. Jan 11 11:32:18 managed-node3 sudo[41433]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:18 managed-node3 sudo[41617]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-prfczuukauwjvzfeckpiacmksfaghvgl ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613138.5093179-15026-260297364284951/AnsiballZ_command.py' Jan 11 11:32:18 managed-node3 sudo[41617]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:18 managed-node3 python3.12[41620]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:18 managed-node3 systemd[29953]: Started podman-41621.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 158. Jan 11 11:32:18 managed-node3 sudo[41617]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:19 managed-node3 sudo[41801]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nflymdojzerdynxziygvrkpgkwivzoap ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613138.9904163-15036-68605387076336/AnsiballZ_command.py' Jan 11 11:32:19 managed-node3 sudo[41801]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:19 managed-node3 python3.12[41804]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:19 managed-node3 systemd[29953]: Started podman-41805.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 162. Jan 11 11:32:19 managed-node3 sudo[41801]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:19 managed-node3 sudo[41984]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bvysutorsyzhjrzcczswcnwdctcwxmmm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613139.4753275-15053-13673547339577/AnsiballZ_command.py' Jan 11 11:32:19 managed-node3 sudo[41984]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:19 managed-node3 systemd[29953]: Started podman-41988.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 166. Jan 11 11:32:19 managed-node3 sudo[41984]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:20 managed-node3 sudo[42168]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-werphmcnftfwxfjwrpmrwysmxvarqjoc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613139.9615653-15063-209914961622125/AnsiballZ_command.py' Jan 11 11:32:20 managed-node3 sudo[42168]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:20 managed-node3 systemd[29953]: Started podman-42172.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 170. 
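After each removal the role runs the same battery of listing commands (the short podman-4xxxx.scope units above) to confirm nothing is left behind. Collected in one place, those checks are:

  podman image prune --all -f    # drop images no longer referenced
  podman images -n               # remaining images, no header
  podman volume ls -n            # remaining volumes
  podman ps --noheading          # remaining containers
  podman network ls -n -q        # remaining networks, names only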
Jan 11 11:32:20 managed-node3 sudo[42168]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:20 managed-node3 sudo[42351]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ccwgrqxgrooemqnkxghvghzdldenxrwd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613140.447228-15080-276444025678020/AnsiballZ_service_facts.py' Jan 11 11:32:20 managed-node3 sudo[42351]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:20 managed-node3 python3.12[42354]: ansible-service_facts Invoked Jan 11 11:32:23 managed-node3 sudo[42351]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:24 managed-node3 python3.12[42594]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:24 managed-node3 python3.12[42727]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:25 managed-node3 python3.12[42859]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:26 managed-node3 python3.12[42991]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:26 managed-node3 sudo[43166]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qzaetfbsqafgfwxudllfxfsbltawrfpy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613146.1629307-15178-5179065352967/AnsiballZ_systemd.py' Jan 11 11:32:26 managed-node3 sudo[43166]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:26 managed-node3 python3.12[43169]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:32:26 managed-node3 systemd[29953]: Reload requested from client PID 43172 ('systemctl')... Jan 11 11:32:26 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:26 managed-node3 systemd[29953]: Reloading finished in 41 ms. Jan 11 11:32:26 managed-node3 systemd[29953]: Stopped quadlet-basic-unused-volume-volume.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 174 and the job result is done. 
Jan 11 11:32:26 managed-node3 sudo[43166]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:27 managed-node3 python3.12[43313]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:28 managed-node3 python3.12[43577]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:28 managed-node3 sudo[43750]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oswyiuzqfslukkvmebhjffiawnuhugzr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613148.2279422-15237-77436776759072/AnsiballZ_systemd.py' Jan 11 11:32:28 managed-node3 sudo[43750]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:28 managed-node3 python3.12[43753]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:28 managed-node3 systemd[29953]: Reload requested from client PID 43754 ('systemctl')... Jan 11 11:32:28 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:28 managed-node3 systemd[29953]: Reloading finished in 40 ms. Jan 11 11:32:28 managed-node3 sudo[43750]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:29 managed-node3 sudo[43937]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhztpzncvtnsrzovwxczjzxypbzucjus ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613148.8688755-15254-224558205393549/AnsiballZ_command.py' Jan 11 11:32:29 managed-node3 sudo[43937]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:29 managed-node3 systemd[29953]: Started podman-43941.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 175. Jan 11 11:32:29 managed-node3 sudo[43937]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:29 managed-node3 sudo[44120]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kinrlbhhhnqykpfwqdwhrdmzreljcnkz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613149.4480538-15266-184365846934705/AnsiballZ_command.py' Jan 11 11:32:29 managed-node3 sudo[44120]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:29 managed-node3 python3.12[44123]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:29 managed-node3 systemd[29953]: Started podman-44124.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 179. Jan 11 11:32:29 managed-node3 sudo[44120]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:30 managed-node3 sudo[44304]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ecscxesocxjbaijxnekgjkphcjppkpdo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613150.1066644-15287-153720165973771/AnsiballZ_command.py' Jan 11 11:32:30 managed-node3 sudo[44304]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:30 managed-node3 python3.12[44307]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:30 managed-node3 systemd[29953]: Started podman-44308.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 183. Jan 11 11:32:30 managed-node3 sudo[44304]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:30 managed-node3 sudo[44487]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mvpbtgaswrldrpfabhuaugzuoxjqiaao ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613150.590966-15297-118183946335853/AnsiballZ_command.py' Jan 11 11:32:30 managed-node3 sudo[44487]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:30 managed-node3 python3.12[44490]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:30 managed-node3 systemd[29953]: Started podman-44491.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 187. Jan 11 11:32:30 managed-node3 sudo[44487]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:31 managed-node3 sudo[44672]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-phkkkwrciwvimaxhqpwqlwlllokahmst ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613151.1694026-15317-107071091083711/AnsiballZ_command.py' Jan 11 11:32:31 managed-node3 sudo[44672]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:31 managed-node3 python3.12[44675]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:31 managed-node3 systemd[29953]: Started podman-44676.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 191. Jan 11 11:32:31 managed-node3 sudo[44672]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:31 managed-node3 sudo[44856]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-frdygsuahlrtmtbqwbnzpycnjdnnsild ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613151.6694036-15339-10608296490480/AnsiballZ_command.py' Jan 11 11:32:31 managed-node3 sudo[44856]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:31 managed-node3 python3.12[44859]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:32 managed-node3 systemd[29953]: Started podman-44860.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 195. Jan 11 11:32:32 managed-node3 sudo[44856]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:32 managed-node3 sudo[45040]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-emywwtgfcenjejbogbtikyfhyrjwbqdh ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613152.1738348-15365-51371937836539/AnsiballZ_command.py' Jan 11 11:32:32 managed-node3 sudo[45040]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:32 managed-node3 systemd[29953]: Started podman-45044.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 199. Jan 11 11:32:32 managed-node3 sudo[45040]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:32 managed-node3 sudo[45223]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-luwujmvgrbrtbtmrsvxxyhzbzuojsplx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613152.6817553-15384-262447572271742/AnsiballZ_command.py' Jan 11 11:32:32 managed-node3 sudo[45223]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:33 managed-node3 systemd[29953]: Started podman-45227.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 203. 
Jan 11 11:32:33 managed-node3 sudo[45223]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:33 managed-node3 sudo[45406]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gepcymxvzluuukkrdekutwcirittacyp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613153.1890347-15409-206095138219026/AnsiballZ_service_facts.py' Jan 11 11:32:33 managed-node3 sudo[45406]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:33 managed-node3 python3.12[45409]: ansible-service_facts Invoked Jan 11 11:32:36 managed-node3 sudo[45406]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:36 managed-node3 python3.12[45649]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:37 managed-node3 python3.12[45782]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:37 managed-node3 python3.12[45914]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:38 managed-node3 python3.12[46046]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:39 managed-node3 sudo[46221]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ygjkadouzrsfmsuusvdxznhlliradfmz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613158.8254309-15535-31899405719367/AnsiballZ_systemd.py' Jan 11 11:32:39 managed-node3 sudo[46221]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:39 managed-node3 python3.12[46224]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:32:39 managed-node3 systemd[29953]: Reload requested from client PID 46227 ('systemctl')... Jan 11 11:32:39 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:39 managed-node3 systemd[29953]: Reloading finished in 40 ms. Jan 11 11:32:39 managed-node3 systemd[29953]: Stopped quadlet-basic-mysql-volume.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 207 and the job result is done. 
Jan 11 11:32:39 managed-node3 sudo[46221]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:39 managed-node3 python3.12[46368]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:40 managed-node3 python3.12[46632]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:41 managed-node3 sudo[46805]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nlqbazwakvjstpzbmlxkgcnsprxzfjnf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613160.9532418-15612-158023406770613/AnsiballZ_systemd.py' Jan 11 11:32:41 managed-node3 sudo[46805]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:41 managed-node3 python3.12[46808]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:41 managed-node3 systemd[29953]: Reload requested from client PID 46809 ('systemctl')... Jan 11 11:32:41 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:41 managed-node3 systemd[29953]: Reloading finished in 41 ms. Jan 11 11:32:41 managed-node3 sudo[46805]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:41 managed-node3 sudo[46991]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-olvqqqlgujiakgkdrnxdvzcphlqhcrrq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613161.6072545-15629-194620579547675/AnsiballZ_command.py' Jan 11 11:32:41 managed-node3 sudo[46991]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:41 managed-node3 systemd[29953]: Started podman-46995.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 208. Jan 11 11:32:42 managed-node3 sudo[46991]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:42 managed-node3 sudo[47175]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ntdbeeglzlxijrndxzcjpaoitaxuiuzp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613162.2185133-15648-214242175966943/AnsiballZ_command.py' Jan 11 11:32:42 managed-node3 sudo[47175]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:42 managed-node3 python3.12[47178]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:42 managed-node3 systemd[29953]: Started podman-47179.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 212. Jan 11 11:32:42 managed-node3 sudo[47175]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:43 managed-node3 sudo[47359]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eenwafrenrddxfbyrebthlmheplertmy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613162.8959327-15662-100119550112663/AnsiballZ_command.py' Jan 11 11:32:43 managed-node3 sudo[47359]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:43 managed-node3 python3.12[47362]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:43 managed-node3 systemd[29953]: Started podman-47363.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 216. Jan 11 11:32:43 managed-node3 sudo[47359]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:43 managed-node3 sudo[47542]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vmfjrvkpjoqkyjpepvoauwirvdwbtmov ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613163.3848853-15679-126353248656444/AnsiballZ_command.py' Jan 11 11:32:43 managed-node3 sudo[47542]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:43 managed-node3 python3.12[47545]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:43 managed-node3 systemd[29953]: Started podman-47546.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 220. Jan 11 11:32:43 managed-node3 sudo[47542]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:44 managed-node3 sudo[47725]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bsxybzzyhbafngjkgkaimpsnjracquul ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613163.8730524-15689-240419964070751/AnsiballZ_command.py' Jan 11 11:32:44 managed-node3 sudo[47725]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:44 managed-node3 python3.12[47728]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:44 managed-node3 systemd[29953]: Started podman-47729.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 224. Jan 11 11:32:44 managed-node3 sudo[47725]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:44 managed-node3 sudo[47909]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sgxohzvnyrevubejyssguuqdajxurapj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613164.3751202-15706-134908010713081/AnsiballZ_command.py' Jan 11 11:32:44 managed-node3 sudo[47909]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:44 managed-node3 python3.12[47912]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:44 managed-node3 systemd[29953]: Started podman-47913.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 228. Jan 11 11:32:45 managed-node3 sudo[47909]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:46 managed-node3 sudo[48092]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kyyaodyviumlxgwqmfzpbsowdkmqwyes ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613165.923966-15730-29888098965952/AnsiballZ_command.py' Jan 11 11:32:46 managed-node3 sudo[48092]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:46 managed-node3 systemd[29953]: Started podman-48096.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 232. Jan 11 11:32:46 managed-node3 sudo[48092]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:46 managed-node3 sudo[48276]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggzunytuwzdiqomnjnnlhuniaqzoeecx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613166.4153993-15747-220401453367475/AnsiballZ_command.py' Jan 11 11:32:46 managed-node3 sudo[48276]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:46 managed-node3 systemd[29953]: Started podman-48280.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 236. 
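The block above is one full pass of the role's rootless cleanup checks: after a quadlet file is removed, the test sudo's to user_quadlet_basic with XDG_RUNTIME_DIR set and runs podman image prune --all -f, podman images -n, podman volume ls -n, podman ps --noheading and podman network ls -n -q. A rough, illustrative equivalent of those logged invocations (the task names and the literal runtime-dir path are assumptions; the commands, become user and environment come straight from the journal entries):

    # Illustrative sketch only -- not the role's actual task file.
    - name: Prune unused images for the rootless user
      ansible.builtin.command: podman image prune --all -f
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111   # runtime dir of UID 1111, as seen in the log

    - name: Verify that no images, volumes, containers or networks are left behind
      ansible.builtin.command: "{{ item }}"
      loop:
        - podman images -n
        - podman volume ls -n
        - podman ps --noheading
        - podman network ls -n -q
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
      changed_when: false

Each of these podman invocations shows up in the journal as a short-lived podman-<pid>.scope under the user's systemd instance, which is why the log is dominated by "Started podman-NNNNN.scope" messages.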
Jan 11 11:32:46 managed-node3 sudo[48276]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:47 managed-node3 sudo[48461]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pcxnlefzejfqdvcymkgbqnehzjtxjigc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613166.901463-15757-108990047623750/AnsiballZ_service_facts.py' Jan 11 11:32:47 managed-node3 sudo[48461]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:47 managed-node3 python3.12[48464]: ansible-service_facts Invoked Jan 11 11:32:50 managed-node3 sudo[48461]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:51 managed-node3 python3.12[48704]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:51 managed-node3 python3.12[48837]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:52 managed-node3 python3.12[48969]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:53 managed-node3 python3.12[49101]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:53 managed-node3 sudo[49276]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-evgxfoyxiwbqdqempbsppcroxpnfhlxw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613173.5064712-15869-18986601889721/AnsiballZ_systemd.py' Jan 11 11:32:53 managed-node3 sudo[49276]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:53 managed-node3 python3.12[49279]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:32:53 managed-node3 systemd[29953]: Reload requested from client PID 49282 ('systemctl')... Jan 11 11:32:53 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:54 managed-node3 systemd[29953]: Reloading finished in 39 ms. Jan 11 11:32:54 managed-node3 systemd[29953]: Stopped quadlet-basic-unused-network-network.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 240 and the job result is done. 
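To take a user-scope quadlet unit out of service before its source file is deleted, the role calls the systemd module against the user's manager; the journal above records that invocation with name=quadlet-basic-unused-network-network.service, scope=user, state=stopped, enabled=False, force=True. A minimal sketch matching those logged parameters (the task name is an assumption):

    - name: Stop and disable the generated quadlet network unit (user scope)
      ansible.builtin.systemd:
        name: quadlet-basic-unused-network-network.service
        scope: user
        state: stopped
        enabled: false
        force: true
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111   # needed so systemctl talks to the user instance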
Jan 11 11:32:54 managed-node3 sudo[49276]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:54 managed-node3 python3.12[49424]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:55 managed-node3 python3.12[49688]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:55 managed-node3 sudo[49861]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gttdsfiezuikxpvcvetqjlvxxupkwbsa ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613175.5939744-15921-40852223920198/AnsiballZ_systemd.py' Jan 11 11:32:55 managed-node3 sudo[49861]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:56 managed-node3 python3.12[49864]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:56 managed-node3 systemd[29953]: Reload requested from client PID 49865 ('systemctl')... Jan 11 11:32:56 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:56 managed-node3 systemd[29953]: Reloading finished in 38 ms. Jan 11 11:32:56 managed-node3 sudo[49861]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:56 managed-node3 sudo[50047]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sjpvqiylotnhfwaghjphbqbbrsokfson ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613176.2481945-15942-28777684928469/AnsiballZ_command.py' Jan 11 11:32:56 managed-node3 sudo[50047]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:56 managed-node3 systemd[29953]: Started podman-50051.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 241. Jan 11 11:32:56 managed-node3 sudo[50047]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:57 managed-node3 sudo[50232]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mfbdcyqongrzwyenglsyzsvowblforje ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613176.8512917-15964-59712057301008/AnsiballZ_command.py' Jan 11 11:32:57 managed-node3 sudo[50232]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:57 managed-node3 python3.12[50235]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:57 managed-node3 systemd[29953]: Started podman-50236.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 245. Jan 11 11:32:57 managed-node3 sudo[50232]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:57 managed-node3 sudo[50416]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tdgbjwxiyttiuyordlblnyombdfqxyjm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613177.5222197-15978-100401395843130/AnsiballZ_command.py' Jan 11 11:32:57 managed-node3 sudo[50416]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:57 managed-node3 systemd[4347]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 11 11:32:57 managed-node3 systemd[4347]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 11 11:32:57 managed-node3 python3.12[50419]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:57 managed-node3 systemd[4347]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 11 11:32:57 managed-node3 systemd[29953]: Started podman-50421.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 249. Jan 11 11:32:57 managed-node3 sudo[50416]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:58 managed-node3 sudo[50601]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zfteeifopenivhxazgitjcrcqfmyksel ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613178.0105646-15995-149678124235006/AnsiballZ_command.py' Jan 11 11:32:58 managed-node3 sudo[50601]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:58 managed-node3 python3.12[50604]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:58 managed-node3 systemd[29953]: Started podman-50605.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 253. 
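Removing the quadlet source file and then reloading the user manager is what actually makes the generated unit disappear; the journal shows the ansible-file call with state=absent followed by an ansible-systemd daemon_reload in user scope. A hedged sketch of that pair (task names assumed, paths taken from the log):

    - name: Remove the quadlet .network source file
      ansible.builtin.file:
        path: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network
        state: absent

    - name: Reload the user's systemd manager so the quadlet generator drops the unit
      ansible.builtin.systemd:
        daemon_reload: true
        scope: user
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111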
Jan 11 11:32:58 managed-node3 sudo[50601]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:58 managed-node3 sudo[50785]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-egqdyqznzcadumsewivtsjrjifumgmnr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613178.4993725-16005-158070925322928/AnsiballZ_command.py' Jan 11 11:32:58 managed-node3 sudo[50785]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:58 managed-node3 python3.12[50788]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:58 managed-node3 systemd[29953]: Started podman-50789.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 257. Jan 11 11:32:58 managed-node3 sudo[50785]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:59 managed-node3 sudo[50968]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vcjkgnbljfkvojmkcfpyyzcplefoapjr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613178.9969528-16022-275256960740328/AnsiballZ_command.py' Jan 11 11:32:59 managed-node3 sudo[50968]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:59 managed-node3 python3.12[50971]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:59 managed-node3 systemd[29953]: Started podman-50972.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 261. Jan 11 11:32:59 managed-node3 sudo[50968]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:59 managed-node3 sudo[51151]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tffpdxsyygnrqjkhtigzdfnjhcvrldhj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613179.4939308-16032-34418897922330/AnsiballZ_command.py' Jan 11 11:32:59 managed-node3 sudo[51151]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:59 managed-node3 systemd[29953]: Started podman-51155.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 265. 
Jan 11 11:32:59 managed-node3 sudo[51151]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:00 managed-node3 sudo[51334]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fscctwpcinzpwlfpwctcegonfrkqcjmj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613180.0506632-16049-266233763334283/AnsiballZ_command.py' Jan 11 11:33:00 managed-node3 sudo[51334]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:00 managed-node3 systemd[29953]: Started podman-51338.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 269. Jan 11 11:33:00 managed-node3 sudo[51334]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:00 managed-node3 sudo[51519]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-udsrymbuyabvtxkxqugobfychaijeliv ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613180.5378177-16059-81830683970881/AnsiballZ_service_facts.py' Jan 11 11:33:00 managed-node3 sudo[51519]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:00 managed-node3 python3.12[51522]: ansible-service_facts Invoked Jan 11 11:33:03 managed-node3 sudo[51519]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:04 managed-node3 python3.12[51762]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:04 managed-node3 python3.12[51895]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:04 managed-node3 python3.12[52027]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:06 managed-node3 python3.12[52159]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:06 managed-node3 sudo[52334]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ldjuwyprxzfpwyfaxyjppbmidyuugedw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613186.1550627-16171-92779059159981/AnsiballZ_systemd.py' Jan 11 11:33:06 managed-node3 sudo[52334]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:06 managed-node3 python3.12[52337]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:33:06 managed-node3 systemd[29953]: Reload requested from client PID 52340 ('systemctl')... Jan 11 11:33:06 managed-node3 systemd[29953]: Reloading... Jan 11 11:33:06 managed-node3 systemd[29953]: Reloading finished in 38 ms. 
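Before operating on the rootless user again, the role re-checks that subordinate ID ranges exist for it: it stats /usr/bin/getsubids and, when the tool is present, runs getsubids for the user and getsubids -g for the group, exactly as logged above. An illustrative equivalent (the register variable name is an assumption):

    - name: Check whether getsubids is available
      ansible.builtin.stat:
        path: /usr/bin/getsubids
      register: __getsubids    # hypothetical variable name

    - name: Verify subuid and subgid ranges for user_quadlet_basic
      ansible.builtin.command: "{{ item }}"
      loop:
        - getsubids user_quadlet_basic
        - getsubids -g user_quadlet_basic
      changed_when: false
      when: __getsubids.stat.exists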
Jan 11 11:33:06 managed-node3 systemd[29953]: Stopped quadlet-basic-network.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 273 and the job result is done. Jan 11 11:33:06 managed-node3 sudo[52334]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:07 managed-node3 python3.12[52482]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:08 managed-node3 python3.12[52746]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:08 managed-node3 sudo[52919]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uagpywdhzgpfkgpmzzkbwklhzdowvpgn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613188.3371105-16230-126811755623597/AnsiballZ_systemd.py' Jan 11 11:33:08 managed-node3 sudo[52919]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:08 managed-node3 python3.12[52922]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:33:08 managed-node3 systemd[29953]: Reload requested from client PID 52923 ('systemctl')... Jan 11 11:33:08 managed-node3 systemd[29953]: Reloading... Jan 11 11:33:08 managed-node3 systemd[29953]: Reloading finished in 38 ms. Jan 11 11:33:08 managed-node3 sudo[52919]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:09 managed-node3 sudo[53105]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ipdrbmdtumurrrbmvwiajmaarzgvepjs ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613188.9794345-16240-232689960246747/AnsiballZ_command.py' Jan 11 11:33:09 managed-node3 sudo[53105]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:09 managed-node3 systemd[29953]: Started podman-53109.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 274. 
Jan 11 11:33:09 managed-node3 sudo[53105]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:09 managed-node3 sudo[53289]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aqejbavkjycnkiyqqillxjmdqurikwbm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613189.5653348-16259-178661027946329/AnsiballZ_command.py' Jan 11 11:33:09 managed-node3 sudo[53289]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:09 managed-node3 python3.12[53292]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:09 managed-node3 systemd[29953]: Started podman-53293.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 278. Jan 11 11:33:09 managed-node3 sudo[53289]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:10 managed-node3 sudo[53474]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kblqseblugdxumuavemjcnhlsjpiucdc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613190.2663293-16279-228207363507580/AnsiballZ_command.py' Jan 11 11:33:10 managed-node3 sudo[53474]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:10 managed-node3 python3.12[53477]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:10 managed-node3 systemd[29953]: Started podman-53478.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 282. Jan 11 11:33:10 managed-node3 sudo[53474]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:10 managed-node3 sudo[53658]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cvgpcsjxlblvmadnccrmzffcfqcdfian ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613190.7590194-16290-275426166329867/AnsiballZ_command.py' Jan 11 11:33:10 managed-node3 sudo[53658]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:11 managed-node3 python3.12[53661]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:11 managed-node3 systemd[29953]: Started podman-53662.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 286. 
Jan 11 11:33:11 managed-node3 sudo[53658]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:11 managed-node3 sudo[53842]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yupaxvwblgltssqbecalhawuglbxilox ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613191.248186-16300-13154183487945/AnsiballZ_command.py' Jan 11 11:33:11 managed-node3 sudo[53842]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:11 managed-node3 python3.12[53845]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:11 managed-node3 systemd[29953]: Started podman-53846.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 290. Jan 11 11:33:11 managed-node3 sudo[53842]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:11 managed-node3 sudo[54025]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-smzfeudlvapqwxkgpacsggbfyaedflqd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613191.7495422-16317-244322107034894/AnsiballZ_command.py' Jan 11 11:33:11 managed-node3 sudo[54025]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:12 managed-node3 python3.12[54028]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:12 managed-node3 systemd[29953]: Started podman-54029.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 294. Jan 11 11:33:12 managed-node3 sudo[54025]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:12 managed-node3 sudo[54208]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ywdnanuftawliasgrdtqjocctmttgiug ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613192.2355778-16327-272190748778280/AnsiballZ_command.py' Jan 11 11:33:12 managed-node3 sudo[54208]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:12 managed-node3 systemd[29953]: Started podman-54212.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 298. 
Jan 11 11:33:12 managed-node3 sudo[54208]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:12 managed-node3 sudo[54392]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yffewrjjxmcmqzexvgdkeswjccyikcvq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613192.7367122-16352-220457878244959/AnsiballZ_command.py' Jan 11 11:33:12 managed-node3 sudo[54392]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:13 managed-node3 systemd[29953]: Started podman-54396.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 302. Jan 11 11:33:13 managed-node3 sudo[54392]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:13 managed-node3 sudo[54577]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tzrizccgbonxgktzuibkpvpcamseyvcz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613193.2396564-16370-163125541590530/AnsiballZ_service_facts.py' Jan 11 11:33:13 managed-node3 sudo[54577]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:13 managed-node3 python3.12[54580]: ansible-service_facts Invoked Jan 11 11:33:15 managed-node3 sudo[54577]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:15 managed-node3 python3.12[54820]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:16 managed-node3 sudo[54995]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lpwpxzcewnzrzdjjbahcijherplhozgw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613196.1108353-16461-66317049932168/AnsiballZ_podman_container_info.py' Jan 11 11:33:16 managed-node3 sudo[54995]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:16 managed-node3 python3.12[54998]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Jan 11 11:33:16 managed-node3 systemd[29953]: Started podman-54999.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 306. 
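The final per-user verification uses containers.podman.podman_container_info rather than a raw command, so the test can inspect structured data about any containers that might still exist. A minimal sketch based on the logged invocation (the register name is an assumption; the result could then be asserted empty):

    - name: Gather info about remaining containers for the rootless user
      containers.podman.podman_container_info:
        executable: podman
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
      register: __container_info   # hypothetical variable name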
Jan 11 11:33:16 managed-node3 sudo[54995]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:16 managed-node3 sudo[55178]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hcoumwfkcpdyzhlkkblacyounsqvojbc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613196.746958-16491-29644121397386/AnsiballZ_command.py' Jan 11 11:33:16 managed-node3 sudo[55178]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:17 managed-node3 python3.12[55181]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:17 managed-node3 systemd[29953]: Started podman-55182.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 310. Jan 11 11:33:17 managed-node3 sudo[55178]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:17 managed-node3 sudo[55363]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-logbuefdyxfzdjpetmahsedrtciglngz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613197.281544-16508-135824060290071/AnsiballZ_command.py' Jan 11 11:33:17 managed-node3 sudo[55363]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:17 managed-node3 python3.12[55366]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:17 managed-node3 systemd[29953]: Started podman-55367.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 314. Jan 11 11:33:17 managed-node3 sudo[55363]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:18 managed-node3 python3.12[55505]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl disable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Jan 11 11:33:18 managed-node3 systemd[1]: Stopping user@1111.service - User Manager for UID 1111... ░░ Subject: A stop job for unit user@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@1111.service has begun execution. ░░ ░░ The job identifier is 1722. Jan 11 11:33:18 managed-node3 systemd[29953]: Activating special unit exit.target... Jan 11 11:33:18 managed-node3 systemd[29953]: Stopping podman-pause-60dc1641.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 334. 
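With no quadlet units left, the role cancels lingering for the user; the removes= guard seen in the log makes the task a no-op once the linger marker file is gone, which keeps the play idempotent. Dropping the last reference to the user is what triggers the long user@1111.service shutdown sequence that follows. A sketch matching the logged command:

    - name: Cancel lingering for user_quadlet_basic
      ansible.builtin.command: loginctl disable-linger user_quadlet_basic
      args:
        removes: /var/lib/systemd/linger/user_quadlet_basic   # skip if linger is already off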
Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 329 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 325 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 328 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 330 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 332 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 335 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 339 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 336 and the job result is done. Jan 11 11:33:18 managed-node3 dbus-broker[30307]: Dispatched 4411 messages @ 2(±17)μs / message. ░░ Subject: Dispatched 4411 messages ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ This message is printed by dbus-broker when shutting down. It includes metric ░░ information collected during the runtime of dbus-broker. ░░ ░░ The message lists the number of dispatched messages ░░ (in this case 4411) as well as the mean time to ░░ handling a single message. The time measurements exclude the time spent on ░░ writing to and reading from the kernel. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopping dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. 
░░ ░░ The job identifier is 327. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 324 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped podman-pause-60dc1641.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 334 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Removed slice user.slice - Slice /user. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 333 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: user.slice: Consumed 8.951s CPU time, 472.3M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 327 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Removed slice session.slice - User Core Session Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 331 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 326 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 322 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 321. Jan 11 11:33:18 managed-node3 systemd[29953]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 319. Jan 11 11:33:18 managed-node3 systemd[29953]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 318. 
Jan 11 11:33:18 managed-node3 systemd[1]: user@1111.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@1111.service has successfully entered the 'dead' state. Jan 11 11:33:18 managed-node3 systemd[1]: Stopped user@1111.service - User Manager for UID 1111. ░░ Subject: A stop job for unit user@1111.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@1111.service has finished. ░░ ░░ The job identifier is 1722 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[1]: user@1111.service: Consumed 10.633s CPU time, 478.7M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@1111.service completed and consumed the indicated resources. Jan 11 11:33:18 managed-node3 systemd[1]: Stopping user-runtime-dir@1111.service - User Runtime Directory /run/user/1111... ░░ Subject: A stop job for unit user-runtime-dir@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@1111.service has begun execution. ░░ ░░ The job identifier is 1721. Jan 11 11:33:18 managed-node3 systemd[1]: run-user-1111.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-1111.mount has successfully entered the 'dead' state. Jan 11 11:33:18 managed-node3 systemd[1]: user-runtime-dir@1111.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@1111.service has successfully entered the 'dead' state. Jan 11 11:33:18 managed-node3 systemd[1]: Stopped user-runtime-dir@1111.service - User Runtime Directory /run/user/1111. ░░ Subject: A stop job for unit user-runtime-dir@1111.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@1111.service has finished. ░░ ░░ The job identifier is 1721 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[1]: Removed slice user-1111.slice - User Slice of UID 1111. ░░ Subject: A stop job for unit user-1111.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-1111.slice has finished. ░░ ░░ The job identifier is 1723 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[1]: user-1111.slice: Consumed 10.662s CPU time, 478.7M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-1111.slice completed and consumed the indicated resources. Jan 11 11:33:18 managed-node3 systemd-logind[657]: Removed session 6. ░░ Subject: Session 6 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 6 has been terminated. 
Jan 11 11:33:18 managed-node3 python3.12[55642]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:19 managed-node3 python3.12[55774]: ansible-ansible.legacy.systemd Invoked with name=systemd-logind state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Jan 11 11:33:19 managed-node3 systemd[1]: Stopping systemd-logind.service - User Login Management... ░░ Subject: A stop job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 1725. Jan 11 11:33:19 managed-node3 systemd[1]: systemd-logind.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-logind.service has successfully entered the 'dead' state. Jan 11 11:33:19 managed-node3 systemd[1]: Stopped systemd-logind.service - User Login Management. ░░ Subject: A stop job for unit systemd-logind.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has finished. ░░ ░░ The job identifier is 1725 and the job result is done. Jan 11 11:33:19 managed-node3 python3.12[55920]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:19 managed-node3 systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm... ░░ Subject: A start job for unit modprobe@drm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has begun execution. ░░ ░░ The job identifier is 1806. Jan 11 11:33:19 managed-node3 systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Jan 11 11:33:19 managed-node3 systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm. ░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 1806. Jan 11 11:33:19 managed-node3 systemd[1]: Starting systemd-logind.service - User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 1726. Jan 11 11:33:19 managed-node3 systemd-logind[55924]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. 
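The test then polls the user's logind state and, evidently as a cleanup step, stops systemd-logind so stale session records for UID 1111 are dropped; the next loginctl call D-Bus-activates logind again, which is why the journal immediately shows it starting back up and registering a new seat. A hedged sketch of those two logged invocations (task names and the register variable are assumptions):

    - name: Check the logind state of user_quadlet_basic
      ansible.builtin.command: loginctl show-user --value -p State user_quadlet_basic
      register: __user_state    # hypothetical variable name
      changed_when: false
      failed_when: false

    - name: Stop systemd-logind to flush stale session state
      ansible.builtin.systemd:
        name: systemd-logind
        state: stopped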
Jan 11 11:33:19 managed-node3 systemd-logind[55924]: Watching system buttons on /dev/input/event0 (Power Button) Jan 11 11:33:19 managed-node3 systemd-logind[55924]: Watching system buttons on /dev/input/event1 (Sleep Button) Jan 11 11:33:19 managed-node3 systemd-logind[55924]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Jan 11 11:33:19 managed-node3 systemd[1]: Started systemd-logind.service - User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 1726. Jan 11 11:33:20 managed-node3 python3.12[56061]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:22 managed-node3 python3.12[56323]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:23 managed-node3 python3.12[56460]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 11 11:33:24 managed-node3 python3.12[56592]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:27 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:27 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:28 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopping session-3.scope - Session 3 of User root... ░░ Subject: A stop job for unit session-3.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-3.scope has begun execution. ░░ ░░ The job identifier is 1888. Jan 11 11:33:29 managed-node3 sshd-session[4394]: error: mm_reap: preauth child terminated by signal 15 Jan 11 11:33:29 managed-node3 systemd[1]: Stopping session-5.scope - Session 5 of User root... ░░ Subject: A stop job for unit session-5.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-5.scope has begun execution. ░░ ░░ The job identifier is 1889. 
Jan 11 11:33:29 managed-node3 sshd-session[6522]: error: mm_reap: preauth child terminated by signal 15 Jan 11 11:33:29 managed-node3 sshd-session[4394]: pam_systemd(sshd:session): Failed to release session: No session '3' known Jan 11 11:33:29 managed-node3 sshd-session[4394]: pam_unix(sshd:session): session closed for user root Jan 11 11:33:29 managed-node3 sshd-session[6522]: pam_systemd(sshd:session): Failed to release session: No session '5' known Jan 11 11:33:29 managed-node3 sshd-session[6522]: pam_unix(sshd:session): session closed for user root Jan 11 11:33:29 managed-node3 systemd[1]: session-3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopped session-3.scope - Session 3 of User root. ░░ Subject: A stop job for unit session-3.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-3.scope has finished. ░░ ░░ The job identifier is 1888 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: session-3.scope: Consumed 3.158s CPU time, 86.2M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope completed and consumed the indicated resources. Jan 11 11:33:29 managed-node3 systemd[1]: session-5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-5.scope has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopped session-5.scope - Session 5 of User root. ░░ Subject: A stop job for unit session-5.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-5.scope has finished. ░░ ░░ The job identifier is 1889 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: session-5.scope: Consumed 2min 17.198s CPU time, 389.5M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-5.scope completed and consumed the indicated resources. Jan 11 11:33:29 managed-node3 systemd[1]: Stopping user@0.service - User Manager for UID 0... ░░ Subject: A stop job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1890. Jan 11 11:33:29 managed-node3 systemd[4347]: Activating special unit exit.target... Jan 11 11:33:29 managed-node3 systemd[4347]: Removed slice background.slice - User Background Tasks Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 21 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 27 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target basic.target - Basic System. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 29 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 23 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 33 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 31 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 24 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 32 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 26 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 34 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 11 11:33:29 managed-node3 systemd[4347]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Jan 11 11:33:29 managed-node3 systemd[4347]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 17. Jan 11 11:33:29 managed-node3 systemd[1]: user@0.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@0.service has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopped user@0.service - User Manager for UID 0. ░░ Subject: A stop job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has finished. ░░ ░░ The job identifier is 1890 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: Stopping user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A stop job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 1887. Jan 11 11:33:29 managed-node3 systemd[1]: run-user-0.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-0.mount has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: user-runtime-dir@0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@0.service has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopped user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A stop job for unit user-runtime-dir@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has finished. ░░ ░░ The job identifier is 1887 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: Removed slice user-0.slice - User Slice of UID 0. ░░ Subject: A stop job for unit user-0.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-0.slice has finished. ░░ ░░ The job identifier is 1891 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: user-0.slice: Consumed 2min 20.700s CPU time, 455M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-0.slice completed and consumed the indicated resources. Jan 11 11:33:29 managed-node3 sshd-session[56896]: Accepted publickey for root from 10.31.14.128 port 56984 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Jan 11 11:33:29 managed-node3 systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 1894. Jan 11 11:33:29 managed-node3 systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 1893. Jan 11 11:33:29 managed-node3 systemd-logind[55924]: New session 7 of user root. 
░░ Subject: A new session 7 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 7 has been created for the user root. ░░ ░░ The leading process of the session is 56896. Jan 11 11:33:29 managed-node3 systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 1893. Jan 11 11:33:29 managed-node3 systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1973. Jan 11 11:33:29 managed-node3 systemd-logind[55924]: New session 8 of user root. ░░ Subject: A new session 8 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 8 has been created for the user root. ░░ ░░ The leading process of the session is 56904. Jan 11 11:33:29 managed-node3 (systemd)[56904]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:33:29 managed-node3 systemd[56904]: Queued start job for default target default.target. Jan 11 11:33:29 managed-node3 systemd[56904]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 11 11:33:29 managed-node3 systemd[56904]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 11 11:33:29 managed-node3 systemd[56904]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 11 11:33:29 managed-node3 systemd[56904]: Starting dbus.socket - D-Bus User Message Bus Socket... 
░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Jan 11 11:33:29 managed-node3 systemd[56904]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 11 11:33:29 managed-node3 systemd[56904]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 11 11:33:29 managed-node3 systemd[56904]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 11 11:33:29 managed-node3 systemd[56904]: Startup finished in 106ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 106977 microseconds. Jan 11 11:33:29 managed-node3 systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 1973. Jan 11 11:33:30 managed-node3 systemd[1]: Started session-7.scope - Session 7 of User root. ░░ Subject: A start job for unit session-7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-7.scope has finished successfully. ░░ ░░ The job identifier is 2054. 
Jan 11 11:33:30 managed-node3 sshd-session[56896]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:33:31 managed-node3 python3.12[57095]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 11 11:33:32 managed-node3 python3.12[57255]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:32 managed-node3 python3.12[57386]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:34 managed-node3 python3.12[57648]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:35 managed-node3 python3.12[57786]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 11 11:33:36 managed-node3 python3.12[57918]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:38 managed-node3 python3.12[58051]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:39 managed-node3 python3.12[58184]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:39 managed-node3 python3.12[58315]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:33:40 managed-node3 python3.12[58420]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613219.4797897-17447-31959509434284/.source.pod dest=/etc/containers/systemd/quadlet-pod-pod.pod owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:41 managed-node3 python3.12[58551]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:33:41 managed-node3 systemd[1]: Reload requested from client PID 58552 ('systemctl') (unit session-7.scope)... Jan 11 11:33:41 managed-node3 systemd[1]: Reloading... Jan 11 11:33:41 managed-node3 systemd-rc-local-generator[58594]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:33:41 managed-node3 systemd-ssh-generator[58596]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:33:41 managed-node3 (sd-exec-[58569]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:33:41 managed-node3 systemd[1]: Reloading finished in 194 ms. 
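The tasks above drop a quadlet unit at /etc/containers/systemd/quadlet-pod-pod.pod and reload systemd so the quadlet generator can translate it into quadlet-pod-pod-pod.service. The file's contents are not captured in this log (only its checksum), so the following is a minimal sketch consistent with the names seen here; PodName= is an assumption inferred from the pod later being created as "quadlet-pod".

    # Hypothetical minimal quadlet pod unit; the real file deployed by the role
    # is identified above only by its checksum, not its content.
    cat > /etc/containers/systemd/quadlet-pod-pod.pod <<'EOF'
    [Pod]
    PodName=quadlet-pod
    EOF
    # Quadlet maps <name>.pod to <name>-pod.service, so this file yields
    # quadlet-pod-pod-pod.service once systemd re-runs its generators.
    systemctl daemon-reload
    systemctl start quadlet-pod-pod-pod.service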
Jan 11 11:33:41 managed-node3 python3.12[58734]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:33:41 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2136. Jan 11 11:33:41 managed-node3 systemd[1]: var-lib-containers-storage-overlay-compat2897760179-lower\x2dmapped.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat2897760179-lower\x2dmapped.mount has successfully entered the 'dead' state. Jan 11 11:33:42 managed-node3 podman[58738]: 2025-01-11 11:33:42.161976365 -0500 EST m=+0.307686542 image build 46a99829a23feee54ca5ee50428836042f880fe5158b9e9e380af827fc20c994 Jan 11 11:33:42 managed-node3 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 2221. Jan 11 11:33:42 managed-node3 systemd[1]: Created slice machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice - cgroup machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice. ░░ Subject: A start job for unit machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice has finished successfully. ░░ ░░ The job identifier is 2220. 
Jan 11 11:33:42 managed-node3 podman[58738]: 2025-01-11 11:33:42.214500844 -0500 EST m=+0.360211079 container create 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:42 managed-node3 podman[58738]: 2025-01-11 11:33:42.223918114 -0500 EST m=+0.369628182 pod create f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 quadlet-pod-pod-pod[58738]: f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:42 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:42 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.2725] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.2795] device (podman0): carrier: link connected Jan 11 11:33:42 managed-node3 (udev-worker)[58803]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.2800] device (veth0): carrier: link connected Jan 11 11:33:42 managed-node3 (udev-worker)[58802]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.2833] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3105] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3110] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3116] device (podman0): Activation: starting connection 'podman0' (cc3b408c-6ff4-4bc3-afaa-024a4ecaf30b) Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3117] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3120] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3121] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3123] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2226. 
Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3459] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3462] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3467] device (podman0): Activation: successful, device activated. Jan 11 11:33:42 managed-node3 systemd[1]: Started libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope - libcrun container. ░░ Subject: A start job for unit libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope has finished successfully. ░░ ░░ The job identifier is 2305. Jan 11 11:33:42 managed-node3 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2226. Jan 11 11:33:42 managed-node3 podman[58792]: 2025-01-11 11:33:42.384135577 -0500 EST m=+0.145163827 container init 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 podman[58792]: 2025-01-11 11:33:42.387065432 -0500 EST m=+0.148093789 container start 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 systemd[1]: libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope has successfully entered the 'dead' state. Jan 11 11:33:42 managed-node3 podman[58792]: 2025-01-11 11:33:42.396752198 -0500 EST m=+0.157780383 pod start f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 quadlet-pod-pod-pod[58792]: quadlet-pod Jan 11 11:33:42 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2136. 
Jan 11 11:33:42 managed-node3 podman[58848]: 2025-01-11 11:33:42.433638423 -0500 EST m=+0.035132717 container died 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:42 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:42 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.4700] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:42 managed-node3 podman[58848]: 2025-01-11 11:33:42.524515891 -0500 EST m=+0.126010022 container cleanup 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 podman[58848]: 2025-01-11 11:33:42.525432713 -0500 EST m=+0.126927037 pod stop f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 systemd[1]: Removed slice machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice - cgroup machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice. ░░ Subject: A stop job for unit machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice has finished. ░░ ░░ The job identifier is 2312 and the job result is done. Jan 11 11:33:42 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:33:42 managed-node3 podman[58892]: 2025-01-11 11:33:42.661148895 -0500 EST m=+0.097324272 container remove 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 podman[58892]: 2025-01-11 11:33:42.674472285 -0500 EST m=+0.110647644 pod remove f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 quadlet-pod-pod-pod[58892]: f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 Jan 11 11:33:42 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. 
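This first cycle sets the pattern repeated below: the infra container is created and started, dies almost immediately, the pod is stopped and removed, the ExecStart process exits with status 1, and systemd schedules a restart. Outside the test, a failure like this could be inspected roughly as follows (standard podman and systemd commands; the quadlet binary path shown is the usual Fedora/RHEL location and may differ on other distributions):

    # Unit state and recent journal for the generated service
    systemctl status quadlet-pod-pod-pod.service
    journalctl -xeu quadlet-pod-pod-pod.service
    # Re-run the quadlet generator by hand to surface unit-file problems
    /usr/libexec/podman/quadlet -dryrun
    # Check whether the pod or its infra container is left behind between restarts
    podman pod ps
    podman ps -a --pod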
Jan 11 11:33:42 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 1. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:42 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2314. Jan 11 11:33:42 managed-node3 systemd[1]: Created slice machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice - cgroup machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice. ░░ Subject: A start job for unit machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice has finished successfully. ░░ ░░ The job identifier is 2398. Jan 11 11:33:42 managed-node3 podman[58903]: 2025-01-11 11:33:42.923499699 -0500 EST m=+0.077152560 container create 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:42 managed-node3 podman[58903]: 2025-01-11 11:33:42.930487429 -0500 EST m=+0.084140382 pod create bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 quadlet-pod-pod-pod[58903]: bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 Jan 11 11:33:42 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:42 managed-node3 (udev-worker)[58814]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.9899] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:42 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:42 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:43 managed-node3 (udev-worker)[58824]: Network interface NamePolicy= disabled on kernel command line. 
Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0004] device (podman0): carrier: link connected Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0008] device (veth0): carrier: link connected Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0011] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0146] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0156] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0169] device (podman0): Activation: starting connection 'podman0' (c078faf4-836c-4380-b46d-53e3e0090860) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0171] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0187] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0192] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0196] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0361] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0365] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0375] device (podman0): Activation: successful, device activated. Jan 11 11:33:43 managed-node3 systemd[1]: Started libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope - libcrun container. ░░ Subject: A start job for unit libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope has finished successfully. ░░ ░░ The job identifier is 2404. Jan 11 11:33:43 managed-node3 podman[58912]: 2025-01-11 11:33:43.094364165 -0500 EST m=+0.142302923 container init 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[58912]: 2025-01-11 11:33:43.097976845 -0500 EST m=+0.145915491 container start 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 systemd[1]: libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 podman[58912]: 2025-01-11 11:33:43.106739364 -0500 EST m=+0.154678108 pod start bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 quadlet-pod-pod-pod[58912]: quadlet-pod Jan 11 11:33:43 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2314. Jan 11 11:33:43 managed-node3 podman[58988]: 2025-01-11 11:33:43.148845669 -0500 EST m=+0.033797455 container died 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:43 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.1907] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:43 managed-node3 podman[58988]: 2025-01-11 11:33:43.245686434 -0500 EST m=+0.130638237 container cleanup 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[58988]: 2025-01-11 11:33:43.250670322 -0500 EST m=+0.135622172 pod stop bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 systemd[1]: Removed slice machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice - cgroup machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice. ░░ Subject: A stop job for unit machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice has finished. ░░ ░░ The job identifier is 2411 and the job result is done. Jan 11 11:33:43 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. 
Jan 11 11:33:43 managed-node3 python3.12[59072]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:43 managed-node3 podman[59075]: 2025-01-11 11:33:43.460390803 -0500 EST m=+0.065346504 container remove 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[59075]: 2025-01-11 11:33:43.470818596 -0500 EST m=+0.075774258 pod remove bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 quadlet-pod-pod-pod[59075]: bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 Jan 11 11:33:43 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:43 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 2. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:43 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2413. Jan 11 11:33:43 managed-node3 systemd[1]: Created slice machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice - cgroup machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice. ░░ Subject: A start job for unit machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice has finished successfully. ░░ ░░ The job identifier is 2497. 
Jan 11 11:33:43 managed-node3 podman[59108]: 2025-01-11 11:33:43.665895514 -0500 EST m=+0.067028463 container create 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[59108]: 2025-01-11 11:33:43.670976812 -0500 EST m=+0.072109809 pod create a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 quadlet-pod-pod-pod[59108]: a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:43 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7221] device (podman0): carrier: link connected Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7224] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/7) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7232] device (veth0): carrier: link connected Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7247] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7488] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7497] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7505] device (podman0): Activation: starting connection 'podman0' (65b239aa-8b9a-4693-af2d-f34968395b1b) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7509] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7514] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7517] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7521] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7541] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7542] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7546] device (podman0): Activation: successful, device activated. Jan 11 11:33:43 managed-node3 systemd[1]: Started libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope - libcrun container. 
░░ Subject: A start job for unit libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope has finished successfully. ░░ ░░ The job identifier is 2503. Jan 11 11:33:43 managed-node3 podman[59117]: 2025-01-11 11:33:43.802088126 -0500 EST m=+0.116118046 container init 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[59117]: 2025-01-11 11:33:43.804936417 -0500 EST m=+0.118966414 container start 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 systemd[1]: libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 podman[59117]: 2025-01-11 11:33:43.812660115 -0500 EST m=+0.126690086 pod start a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 quadlet-pod-pod-pod[59117]: quadlet-pod Jan 11 11:33:43 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2413. Jan 11 11:33:43 managed-node3 podman[59148]: 2025-01-11 11:33:43.842204827 -0500 EST m=+0.022015288 container died 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:43 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 systemd[1]: run-netns-netns\x2d4ff228e8\x2d3ef0\x2d5a40\x2d0a90\x2d5d04e49b142b.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d4ff228e8\x2d3ef0\x2d5a40\x2d0a90\x2d5d04e49b142b.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay-d3a043c760e53579abc09563f308ecda214f4d8c2fb56392504f327f596d81be-merged.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-d3a043c760e53579abc09563f308ecda214f4d8c2fb56392504f327f596d81be-merged.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.8791] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:43 managed-node3 systemd[1]: run-netns-netns\x2d52167144\x2d26e4\x2d8553\x2dc4ab\x2ddbe341587c2d.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d52167144\x2d26e4\x2d8553\x2dc4ab\x2ddbe341587c2d.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay-4ace597ac1e8b5c7a95aa85a9f9a6c385ad9668276e22ab87a26bb34935edeee-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-4ace597ac1e8b5c7a95aa85a9f9a6c385ad9668276e22ab87a26bb34935edeee-merged.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 podman[59148]: 2025-01-11 11:33:43.928041874 -0500 EST m=+0.107852343 container cleanup 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[59148]: 2025-01-11 11:33:43.929103673 -0500 EST m=+0.108914146 pod stop a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 systemd[1]: Removed slice machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice - cgroup machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice. ░░ Subject: A stop job for unit machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice has finished. ░░ ░░ The job identifier is 2510 and the job result is done. 
Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:33:44 managed-node3 podman[59171]: 2025-01-11 11:33:44.092865512 -0500 EST m=+0.121909350 container remove 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:44 managed-node3 podman[59171]: 2025-01-11 11:33:44.103290226 -0500 EST m=+0.132334060 pod remove a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 quadlet-pod-pod-pod[59171]: a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 Jan 11 11:33:44 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:44 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 3. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:44 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2512. Jan 11 11:33:44 managed-node3 systemd[1]: Created slice machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice - cgroup machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice. ░░ Subject: A start job for unit machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice has finished successfully. ░░ ░░ The job identifier is 2596. 
Jan 11 11:33:44 managed-node3 podman[59181]: 2025-01-11 11:33:44.431125744 -0500 EST m=+0.084935362 container create b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:44 managed-node3 podman[59181]: 2025-01-11 11:33:44.439107554 -0500 EST m=+0.092917156 pod create fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 quadlet-pod-pod-pod[59181]: fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:44 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:44 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.4955] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5018] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5038] device (veth0): carrier: link connected Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5055] device (podman0): carrier: link connected Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5287] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5294] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5303] device (podman0): Activation: starting connection 'podman0' (24ea35ac-2e29-4912-9ce3-1316d64cdeea) Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5305] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5310] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5312] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5316] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5328] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5330] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5335] device (podman0): Activation: successful, device activated. Jan 11 11:33:44 managed-node3 systemd[1]: Started libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope - libcrun container. 
░░ Subject: A start job for unit libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope has finished successfully. ░░ ░░ The job identifier is 2602. Jan 11 11:33:44 managed-node3 podman[59236]: 2025-01-11 11:33:44.57596445 -0500 EST m=+0.114151936 container init b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:44 managed-node3 podman[59236]: 2025-01-11 11:33:44.57891896 -0500 EST m=+0.117106429 container start b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:44 managed-node3 systemd[1]: libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope has successfully entered the 'dead' state. Jan 11 11:33:44 managed-node3 podman[59236]: 2025-01-11 11:33:44.58689249 -0500 EST m=+0.125079901 pod start fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 quadlet-pod-pod-pod[59236]: quadlet-pod Jan 11 11:33:44 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2512. 
Jan 11 11:33:44 managed-node3 podman[59286]: 2025-01-11 11:33:44.627235873 -0500 EST m=+0.036804689 container died b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:44 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:44 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.6709] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:44 managed-node3 podman[59286]: 2025-01-11 11:33:44.732822318 -0500 EST m=+0.142391055 pod stop fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 podman[59286]: 2025-01-11 11:33:44.733345838 -0500 EST m=+0.142914621 container cleanup b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:44 managed-node3 systemd[1]: Removed slice machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice - cgroup machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice. ░░ Subject: A stop job for unit machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice has finished. ░░ ░░ The job identifier is 2609 and the job result is done. Jan 11 11:33:44 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:33:44 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 11 11:33:44 managed-node3 podman[59359]: 2025-01-11 11:33:44.964607034 -0500 EST m=+0.076955461 container remove b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:44 managed-node3 podman[59359]: 2025-01-11 11:33:44.978722022 -0500 EST m=+0.091069993 pod remove fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 quadlet-pod-pod-pod[59359]: fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e Jan 11 11:33:44 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:44 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:45 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 4. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:45 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2611. Jan 11 11:33:45 managed-node3 systemd[1]: Created slice machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice - cgroup machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice. ░░ Subject: A start job for unit machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice has finished successfully. ░░ ░░ The job identifier is 2695. 
Jan 11 11:33:45 managed-node3 podman[59378]: 2025-01-11 11:33:45.413300507 -0500 EST m=+0.066708719 container create cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 podman[59378]: 2025-01-11 11:33:45.418970807 -0500 EST m=+0.072378991 pod create 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e (image=, name=quadlet-pod) Jan 11 11:33:45 managed-node3 quadlet-pod-pod-pod[59378]: 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.4695] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/11) Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:45 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:45 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.4814] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/12) Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.4822] device (veth0): carrier: link connected Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.4836] device (podman0): carrier: link connected Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5046] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5062] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5069] device (podman0): Activation: starting connection 'podman0' (829234d2-499e-4deb-9a30-f8535ff1629d) Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5070] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5072] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5074] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5076] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5106] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5108] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 
NetworkManager[708]: [1736613225.5112] device (podman0): Activation: successful, device activated. Jan 11 11:33:45 managed-node3 systemd[1]: Started libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope - libcrun container. ░░ Subject: A start job for unit libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope has finished successfully. ░░ ░░ The job identifier is 2701. Jan 11 11:33:45 managed-node3 podman[59386]: 2025-01-11 11:33:45.564980188 -0500 EST m=+0.124304861 container init cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 podman[59386]: 2025-01-11 11:33:45.567724137 -0500 EST m=+0.127049102 container start cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 systemd[1]: libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope has successfully entered the 'dead' state. Jan 11 11:33:45 managed-node3 podman[59386]: 2025-01-11 11:33:45.574299577 -0500 EST m=+0.133624167 pod start 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e (image=, name=quadlet-pod) Jan 11 11:33:45 managed-node3 quadlet-pod-pod-pod[59386]: quadlet-pod Jan 11 11:33:45 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2611. 
Jan 11 11:33:45 managed-node3 podman[59418]: 2025-01-11 11:33:45.605089227 -0500 EST m=+0.023503344 container died cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:45 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:45 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.6413] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:45 managed-node3 podman[59418]: 2025-01-11 11:33:45.691013863 -0500 EST m=+0.109427986 container cleanup cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 podman[59418]: 2025-01-11 11:33:45.692958697 -0500 EST m=+0.111372736 pod stop 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e (image=, name=quadlet-pod) Jan 11 11:33:45 managed-node3 systemd[1]: Removed slice machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice - cgroup machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice. ░░ Subject: A stop job for unit machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice has finished. ░░ ░░ The job identifier is 2708 and the job result is done. Jan 11 11:33:45 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:33:45 managed-node3 podman[59441]: 2025-01-11 11:33:45.848995946 -0500 EST m=+0.122184958 container remove cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:45 managed-node3 podman[59441]: 2025-01-11 11:33:45.860106345 -0500 EST m=+0.133295323 pod remove 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e (image=, name=quadlet-pod) Jan 11 11:33:45 managed-node3 quadlet-pod-pod-pod[59441]: 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e Jan 11 11:33:45 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. 
Jan 11 11:33:46 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 5. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:46 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Start request repeated too quickly. Jan 11 11:33:46 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:46 managed-node3 systemd[1]: Failed to start quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished with a failure. ░░ ░░ The job identifier is 2710 and the job result is failed. Jan 11 11:33:47 managed-node3 podman[59366]: 2025-01-11 11:33:47.404358497 -0500 EST m=+2.491863471 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 11 11:33:47 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:47 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
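For reference, the units involved here follow Podman's pod/container quadlet layout. The sketch below shows what such a pair typically looks like: the container file path is the one written by the tasks that follow, the pod file name and PodName are inferred from the quadlet-pod-pod-pod.service unit and the "quadlet-pod" pod name in the journal, and the remaining settings are illustrative assumptions rather than the test's actual templates.

    # Illustrative sketch only - names inferred from the log, contents assumed.
    cat > /etc/containers/systemd/quadlet-pod-pod.pod <<'EOF'
    [Pod]
    PodName=quadlet-pod
    EOF

    cat > /etc/containers/systemd/quadlet-pod-container.container <<'EOF'
    [Container]
    Image=quay.io/libpod/testimage:20210610
    # Pod= ties this container to the pod unit; quadlet turns it into a hard
    # ordering/requirement dependency on quadlet-pod-pod-pod.service.
    Pod=quadlet-pod-pod.pod
    Exec=sleep 3600
    EOF

    systemctl daemon-reload
    systemctl start quadlet-pod-container.service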
Jan 11 11:33:47 managed-node3 python3.12[59607]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:48 managed-node3 python3.12[59738]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:33:48 managed-node3 python3.12[59843]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613228.00213-17713-248510374954123/.source.container dest=/etc/containers/systemd/quadlet-pod-container.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:49 managed-node3 python3.12[59974]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:33:49 managed-node3 systemd[1]: Reload requested from client PID 59975 ('systemctl') (unit session-7.scope)... Jan 11 11:33:49 managed-node3 systemd[1]: Reloading... Jan 11 11:33:49 managed-node3 systemd-rc-local-generator[60018]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:33:49 managed-node3 systemd-ssh-generator[60020]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:33:49 managed-node3 (sd-exec-[59993]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:33:49 managed-node3 systemd[1]: Reloading finished in 205 ms. Jan 11 11:33:49 managed-node3 python3.12[60158]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:33:49 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Start request repeated too quickly. Jan 11 11:33:49 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:49 managed-node3 systemd[1]: Failed to start quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished with a failure. ░░ ░░ The job identifier is 2795 and the job result is failed. Jan 11 11:33:49 managed-node3 systemd[1]: Dependency failed for quadlet-pod-container.service. ░░ Subject: A start job for unit quadlet-pod-container.service has failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-container.service has finished with a failure. ░░ ░░ The job identifier is 2794 and the job result is dependency. 
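The "Dependency failed" message above follows from the pod/container split: the container unit's Pod= reference makes quadlet-pod-container.service depend on quadlet-pod-pod-pod.service, and at this point the pod service has already hit its start rate limit, so the container's start job finishes with result 'dependency'. A minimal sketch of how one might confirm the generated dependency and clear the start limit by hand (standard systemctl calls, not something the test runs here):

    # Show the dependencies quadlet generated for the container unit.
    systemctl show -p Requires -p BindsTo -p After quadlet-pod-container.service
    # Clear the start-rate-limit/failed state on the pod unit before retrying.
    systemctl reset-failed quadlet-pod-pod-pod.service
    systemctl start quadlet-pod-container.service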
Jan 11 11:33:49 managed-node3 systemd[1]: quadlet-pod-container.service: Job quadlet-pod-container.service/start failed with result 'dependency'. Jan 11 11:33:50 managed-node3 python3.12[60292]: ansible-ansible.legacy.command Invoked with _raw_params=set -x set -o pipefail exec 1>&2 #podman volume rm --all #podman network prune -f podman volume ls podman network ls podman secret ls podman container ls podman pod ls podman images systemctl list-units | grep quadlet systemctl list-unit-files | grep quadlet ls -alrtF /etc/containers/systemd /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:50 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:50 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:51 managed-node3 python3.12[60475]: ansible-ansible.legacy.command Invoked with _raw_params=grep type=AVC /var/log/audit/audit.log _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:51 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
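The failed-state dump above (volume/network/secret/container/pod/image listings, the systemctl unit greps, and the quadlet dry run) can be reproduced directly on the managed host; a short sketch of the same inspection, using only commands that already appear in the log plus standard systemctl/journalctl calls:

    systemctl status quadlet-pod-pod-pod.service quadlet-pod-container.service
    journalctl -u quadlet-pod-pod-pod.service -e
    ls -alrtF /etc/containers/systemd
    # Re-run the quadlet generator verbosely, printing units instead of installing them.
    /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log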
Jan 11 11:33:51 managed-node3 python3.12[60608]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Cleanup user] ************************************************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:159 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.495) 0:00:20.959 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.125) 0:00:21.085 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.090) 0:00:21.175 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.042) 0:00:21.218 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.030) 0:00:21.249 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.028) 0:00:21.277 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.028) 0:00:21.306 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.027) 0:00:21.334 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ 
"podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.067) 0:00:21.401 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.853) 0:00:22.254 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.034) 0:00:22.289 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.036) 0:00:22.325 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.030) 0:00:22.355 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.029) 0:00:22.385 ****** skipping: [managed-node3] => { "changed": false, "false_condition": 
"__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.028) 0:00:22.413 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025307", "end": "2025-01-11 11:33:53.491165", "rc": 0, "start": "2025-01-11 11:33:53.465858" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.393) 0:00:22.807 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.030) 0:00:22.838 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.028) 0:00:22.866 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.033) 0:00:22.899 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.035) 0:00:22.935 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.096) 0:00:23.032 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.050) 0:00:23.083 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.058) 0:00:23.142 ****** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "user_quadlet_pod": null } }, "changed": false } MSG: One or more supplied key could not be found in the database. TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.389) 0:00:23.531 ****** fatal: [managed-node3]: FAILED! => { "changed": false } MSG: The given podman user [user_quadlet_pod] does not exist - cannot continue TASK [Dump journal] ************************************************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:194 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.040) 0:00:23.571 ****** fatal: [managed-node3]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.029016", "end": "2025-01-11 11:33:54.639098", "failed_when_result": true, "rc": 0, "start": "2025-01-11 11:33:54.610082" } STDOUT: Jan 11 11:30:44 managed-node3 python3.12[25813]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:46 managed-node3 systemd[1]: var-lib-containers-storage-overlay-compat3372456078-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat3372456078-merged.mount has successfully entered the 'dead' state. Jan 11 11:30:46 managed-node3 kernel: evm: overlay not supported Jan 11 11:30:46 managed-node3 podman[25947]: 2025-01-11 11:30:46.877429908 -0500 EST m=+0.066758337 system refresh Jan 11 11:30:47 managed-node3 podman[25957]: 2025-01-11 11:30:47.094679943 -0500 EST m=+0.112971453 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY Jan 11 11:30:47 managed-node3 python3.12[26095]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:47 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
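The fatal result above comes from the role's user check: getent returned no passwd entry for user_quadlet_pod, so the cleanup run stops before touching any quadlet state. What the check expects amounts to the following sketch (the user name is taken from the error message; creating it here is purely illustrative, not part of the test flow):

    # The role only manages quadlets for a user that already exists.
    getent passwd user_quadlet_pod >/dev/null || useradd user_quadlet_pod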
Jan 11 11:30:48 managed-node3 python3.12[26226]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:30:48 managed-node3 python3.12[26331]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613047.7212226-11731-256101618506701/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:50 managed-node3 python3.12[26593]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:51 managed-node3 python3.12[26730]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:53 managed-node3 python3.12[26863]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:55 managed-node3 python3.12[26996]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:30:55 managed-node3 python3.12[27128]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:56 managed-node3 python3.12[27392]: ansible-file Invoked with path=/etc/containers/systemd/nopull.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:57 managed-node3 python3.12[27523]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:30:57 managed-node3 systemd[1]: Reload requested from client PID 27524 ('systemctl') (unit session-5.scope)... Jan 11 11:30:57 managed-node3 systemd[1]: Reloading... Jan 11 11:30:57 managed-node3 systemd-ssh-generator[27559]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:30:57 managed-node3 (sd-exec-[27542]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:30:57 managed-node3 systemd-rc-local-generator[27557]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:30:57 managed-node3 systemd[1]: Reloading finished in 197 ms. Jan 11 11:30:58 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 11 11:31:01 managed-node3 python3.12[27976]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:02 managed-node3 python3.12[28113]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:04 managed-node3 python3.12[28246]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:05 managed-node3 python3.12[28379]: ansible-systemd Invoked with name=bogus.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:31:05 managed-node3 systemd[1]: Reload requested from client PID 28382 ('systemctl') (unit session-5.scope)... Jan 11 11:31:05 managed-node3 systemd[1]: Reloading... Jan 11 11:31:05 managed-node3 quadlet-generator[27546]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details. Jan 11 11:31:05 managed-node3 quadlet-generator[28404]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details. Jan 11 11:31:05 managed-node3 systemd-ssh-generator[28427]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:05 managed-node3 systemd-rc-local-generator[28425]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:05 managed-node3 (sd-exec-[28400]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:06 managed-node3 systemd[1]: Reloading finished in 196 ms. Jan 11 11:31:06 managed-node3 python3.12[28566]: ansible-stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:07 managed-node3 python3.12[28830]: ansible-file Invoked with path=/etc/containers/systemd/bogus.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:07 managed-node3 python3.12[28961]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:07 managed-node3 systemd[1]: Reload requested from client PID 28962 ('systemctl') (unit session-5.scope)... Jan 11 11:31:07 managed-node3 systemd[1]: Reloading... Jan 11 11:31:08 managed-node3 systemd-rc-local-generator[29005]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:08 managed-node3 systemd-ssh-generator[29007]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:08 managed-node3 (sd-exec-[28980]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. 
Jan 11 11:31:08 managed-node3 systemd[1]: Reloading finished in 193 ms. Jan 11 11:31:08 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:09 managed-node3 python3.12[29283]: ansible-user Invoked with name=user_quadlet_basic uid=1111 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jan 11 11:31:09 managed-node3 useradd[29285]: new group: name=user_quadlet_basic, GID=1111 Jan 11 11:31:09 managed-node3 useradd[29285]: new user: name=user_quadlet_basic, UID=1111, GID=1111, home=/home/user_quadlet_basic, shell=/bin/bash, from=/dev/pts/0 Jan 11 11:31:12 managed-node3 python3.12[29547]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:13 managed-node3 python3.12[29684]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:15 managed-node3 python3.12[29817]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None Jan 11 11:31:16 managed-node3 python3.12[29949]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:16 managed-node3 systemd[1]: Created slice user-1111.slice - User Slice of UID 1111. ░░ Subject: A start job for unit user-1111.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-1111.slice has finished successfully. ░░ ░░ The job identifier is 1546. Jan 11 11:31:16 managed-node3 systemd[1]: Starting user-runtime-dir@1111.service - User Runtime Directory /run/user/1111... ░░ Subject: A start job for unit user-runtime-dir@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@1111.service has begun execution. ░░ ░░ The job identifier is 1545. Jan 11 11:31:16 managed-node3 systemd[1]: Finished user-runtime-dir@1111.service - User Runtime Directory /run/user/1111. ░░ Subject: A start job for unit user-runtime-dir@1111.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@1111.service has finished successfully. ░░ ░░ The job identifier is 1545. 
Jan 11 11:31:16 managed-node3 systemd[1]: Starting user@1111.service - User Manager for UID 1111... ░░ Subject: A start job for unit user@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@1111.service has begun execution. ░░ ░░ The job identifier is 1625. Jan 11 11:31:16 managed-node3 systemd-logind[657]: New session 6 of user user_quadlet_basic. ░░ Subject: A new session 6 has been created for user user_quadlet_basic ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user user_quadlet_basic. ░░ ░░ The leading process of the session is 29953. Jan 11 11:31:16 managed-node3 (systemd)[29953]: pam_unix(systemd-user:session): session opened for user user_quadlet_basic(uid=1111) by user_quadlet_basic(uid=0) Jan 11 11:31:16 managed-node3 systemd[29953]: Queued start job for default target default.target. Jan 11 11:31:16 managed-node3 systemd[29953]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 11 11:31:16 managed-node3 systemd[29953]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 11 11:31:16 managed-node3 systemd[29953]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 11 11:31:16 managed-node3 systemd[29953]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jan 11 11:31:16 managed-node3 systemd[29953]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 11 11:31:16 managed-node3 systemd[29953]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 11 11:31:16 managed-node3 systemd[29953]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 11 11:31:16 managed-node3 systemd[29953]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 11 11:31:16 managed-node3 systemd[29953]: Startup finished in 70ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 1111 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 70397 microseconds. Jan 11 11:31:16 managed-node3 systemd[1]: Started user@1111.service - User Manager for UID 1111. ░░ Subject: A start job for unit user@1111.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@1111.service has finished successfully. ░░ ░░ The job identifier is 1625. Jan 11 11:31:16 managed-node3 python3.12[30099]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:16 managed-node3 sudo[30274]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-avxipeoxrjhlkfrazvfkttsavzknrimr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613076.7151632-12877-15443294202840/AnsiballZ_podman_secret.py' Jan 11 11:31:16 managed-node3 sudo[30274]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:17 managed-node3 systemd[29953]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 11 11:31:17 managed-node3 systemd[29953]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. 
░░ ░░ The job identifier is 13. Jan 11 11:31:17 managed-node3 dbus-broker-launch[30306]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 11 11:31:17 managed-node3 dbus-broker-launch[30306]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 11 11:31:17 managed-node3 systemd[29953]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 11 11:31:17 managed-node3 dbus-broker-launch[30306]: Ready Jan 11 11:31:17 managed-node3 systemd[29953]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 11 11:31:17 managed-node3 systemd[29953]: Started podman-30290.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 11 11:31:17 managed-node3 systemd[29953]: Started podman-pause-60dc1641.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 11 11:31:17 managed-node3 systemd[29953]: Started podman-30308.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 11 11:31:17 managed-node3 systemd[29953]: Started podman-30316.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. Jan 11 11:31:18 managed-node3 sudo[30274]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:19 managed-node3 python3.12[30453]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:19 managed-node3 python3.12[30584]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:20 managed-node3 sudo[30759]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dwntgnrpngizjiznvufvyqulguydtjcy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613080.02285-12950-111514160688076/AnsiballZ_podman_secret.py' Jan 11 11:31:20 managed-node3 sudo[30759]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:20 managed-node3 systemd[29953]: Started podman-30769.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 11 11:31:20 managed-node3 systemd[29953]: Started podman-30777.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Jan 11 11:31:20 managed-node3 systemd[29953]: Started podman-30785.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 11 11:31:20 managed-node3 sudo[30759]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:21 managed-node3 python3.12[30922]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:22 managed-node3 python3.12[31055]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:22 managed-node3 python3.12[31187]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:23 managed-node3 python3.12[31319]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:24 managed-node3 python3.12[31450]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:24 managed-node3 python3.12[31581]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:24 managed-node3 python3.12[31686]: ansible-ansible.legacy.copy Invoked with dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network owner=user_quadlet_basic group=1111 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736613084.2410774-13105-219658197907052/.source.network _original_basename=.g3ne8830 follow=False checksum=19c9b17be2af9b9deca5c3bd327f048966750682 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:25 managed-node3 sudo[31859]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo 
BECOME-SUCCESS-judnrhcxiyrmxoqgnstgcbryyvbpxslz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613084.9635346-13143-15106682890183/AnsiballZ_systemd.py' Jan 11 11:31:25 managed-node3 sudo[31859]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:25 managed-node3 python3.12[31862]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:25 managed-node3 systemd[29953]: Reload requested from client PID 31863 ('systemctl')... Jan 11 11:31:25 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:25 managed-node3 systemd[29953]: Reloading finished in 37 ms. Jan 11 11:31:25 managed-node3 sudo[31859]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:25 managed-node3 sudo[32045]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tjkfapunhxorqvhumnjcumrorpkgfobz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613085.6133273-13171-166038780946792/AnsiballZ_systemd.py' Jan 11 11:31:25 managed-node3 sudo[32045]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:26 managed-node3 python3.12[32048]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:26 managed-node3 systemd[29953]: Starting podman-user-wait-network-online.service - Wait for system level network-online.target as user.... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 59. Jan 11 11:31:26 managed-node3 sh[32052]: active Jan 11 11:31:26 managed-node3 systemd[29953]: Finished podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 59. Jan 11 11:31:26 managed-node3 systemd[29953]: Starting quadlet-basic-network.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 47. Jan 11 11:31:26 managed-node3 quadlet-basic-network[32054]: quadlet-basic-name Jan 11 11:31:26 managed-node3 systemd[29953]: Finished quadlet-basic-network.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. 
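Quadlet generates the one-shot quadlet-basic-network.service from ~/.config/containers/systemd/quadlet-basic.network, and the "quadlet-basic-name" line it prints is the podman network it created. The deployed file's contents are not logged here (content=NOT_LOGGING_PARAMETER), so the following is only a sketch, assuming the unit sets NetworkName=quadlet-basic-name, of how the result could be checked by hand, mirroring the sudo + XDG_RUNTIME_DIR pattern used throughout this run:

  # hypothetical file contents, consistent with the name echoed by the service:
  #   [Network]
  #   NetworkName=quadlet-basic-name
  sudo -u user_quadlet_basic XDG_RUNTIME_DIR=/run/user/1111 \
      systemctl --user status quadlet-basic-network.service
  sudo -u user_quadlet_basic XDG_RUNTIME_DIR=/run/user/1111 \
      podman network ls --format '{{.Name}}'    # expect quadlet-basic-name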
Jan 11 11:31:26 managed-node3 sudo[32045]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:27 managed-node3 python3.12[32192]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:27 managed-node3 python3.12[32325]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:27 managed-node3 python3.12[32457]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:29 managed-node3 python3.12[32589]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:29 managed-node3 python3.12[32720]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:30 managed-node3 python3.12[32851]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:30 managed-node3 python3.12[32956]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613089.8482535-13338-83917486083226/.source.network dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:30 managed-node3 sudo[33129]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gieamnuaoahbegtqksokeqhcwvhytali ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613090.578231-13370-65639197134826/AnsiballZ_systemd.py' Jan 11 11:31:30 managed-node3 sudo[33129]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:31 managed-node3 python3.12[33132]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:31 managed-node3 systemd[29953]: Reload requested from client PID 33133 ('systemctl')... Jan 11 11:31:31 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:31 managed-node3 systemd[29953]: Reloading finished in 39 ms. 
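The recurring getsubids and loginctl enable-linger tasks are prerequisite checks for rootless podman: user_quadlet_basic needs subordinate UID/GID ranges, and lingering keeps its user manager (user@1111.service) running without a login session. A rough manual equivalent of what the log records (the sample output is an assumption about the local configuration):

  getsubids user_quadlet_basic        # e.g. "0: user_quadlet_basic 100000 65536"
  getsubids -g user_quadlet_basic     # the matching subordinate GID range
  loginctl enable-linger user_quadlet_basic
  ls /var/lib/systemd/linger/user_quadlet_basic   # flag file created by enable-linger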
Jan 11 11:31:31 managed-node3 sudo[33129]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:31 managed-node3 sudo[33315]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pvkaajvcijpqwmdbqpbrokublruligrd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613091.23687-13400-21248509799713/AnsiballZ_systemd.py' Jan 11 11:31:31 managed-node3 sudo[33315]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:31 managed-node3 python3.12[33318]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:31 managed-node3 systemd[29953]: Starting quadlet-basic-unused-network-network.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 60. Jan 11 11:31:31 managed-node3 quadlet-basic-unused-network-network[33321]: systemd-quadlet-basic-unused-network Jan 11 11:31:31 managed-node3 systemd[29953]: Finished quadlet-basic-unused-network-network.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 60. Jan 11 11:31:31 managed-node3 sudo[33315]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:32 managed-node3 python3.12[33459]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:33 managed-node3 python3.12[33592]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:33 managed-node3 python3.12[33724]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:34 managed-node3 python3.12[33856]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:35 managed-node3 python3.12[33987]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:35 managed-node3 python3.12[34118]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 
11:31:36 managed-node3 python3.12[34223]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613095.4446378-13576-269972147901892/.source.volume dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=90a3571bfc7670328fe3f8fb625585613dbd9c4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:36 managed-node3 sudo[34396]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-znmlewfejphqczptpxrtplngttyqifke ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613096.1590877-13607-158223923579391/AnsiballZ_systemd.py' Jan 11 11:31:36 managed-node3 sudo[34396]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:36 managed-node3 python3.12[34399]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:36 managed-node3 systemd[29953]: Reload requested from client PID 34400 ('systemctl')... Jan 11 11:31:36 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:36 managed-node3 systemd[29953]: Reloading finished in 39 ms. Jan 11 11:31:36 managed-node3 sudo[34396]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:37 managed-node3 sudo[34582]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cjnisejsqpmgphnpswpkgxbgdksyruii ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613096.8421135-13630-12722030320446/AnsiballZ_systemd.py' Jan 11 11:31:37 managed-node3 sudo[34582]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:37 managed-node3 python3.12[34585]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:37 managed-node3 systemd[29953]: Starting quadlet-basic-mysql-volume.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 73. Jan 11 11:31:37 managed-node3 quadlet-basic-mysql-volume[34588]: quadlet-basic-mysql-name Jan 11 11:31:37 managed-node3 systemd[29953]: Finished quadlet-basic-mysql-volume.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 73. 
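As with the network unit, quadlet-basic-mysql-volume.service is generated from quadlet-basic-mysql.volume, and the "quadlet-basic-mysql-name" line it prints is the volume it created. A quick check (hypothetical, not part of the recorded run):

  sudo -u user_quadlet_basic XDG_RUNTIME_DIR=/run/user/1111 \
      podman volume ls --format '{{.Name}}'    # expect quadlet-basic-mysql-name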
Jan 11 11:31:37 managed-node3 sudo[34582]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:38 managed-node3 python3.12[34727]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:38 managed-node3 python3.12[34860]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:39 managed-node3 python3.12[34992]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:40 managed-node3 python3.12[35124]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:41 managed-node3 python3.12[35255]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:41 managed-node3 python3.12[35386]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:42 managed-node3 python3.12[35491]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613101.373204-13827-106009179447385/.source.volume dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=fd0ae560360afa5541b866560b1e849d25e216ef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:42 managed-node3 sudo[35664]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gzbxfjidrwldagfpfjxfakunpdndbxml ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613102.1354904-13858-226367834609827/AnsiballZ_systemd.py' Jan 11 11:31:42 managed-node3 sudo[35664]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:42 managed-node3 python3.12[35667]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:42 managed-node3 systemd[29953]: Reload requested from client PID 35668 ('systemctl')... Jan 11 11:31:42 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:42 managed-node3 systemd[29953]: Reloading finished in 40 ms. 
Jan 11 11:31:42 managed-node3 sudo[35664]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:43 managed-node3 sudo[35850]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bifqattdlqdynwwkzwhyvfvjtsmxqjbb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613102.8123279-13888-16675578239037/AnsiballZ_systemd.py' Jan 11 11:31:43 managed-node3 sudo[35850]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:43 managed-node3 python3.12[35853]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:43 managed-node3 systemd[29953]: Starting quadlet-basic-unused-volume-volume.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 86. Jan 11 11:31:43 managed-node3 quadlet-basic-unused-volume-volume[35856]: systemd-quadlet-basic-unused-volume Jan 11 11:31:43 managed-node3 systemd[29953]: Finished quadlet-basic-unused-volume-volume.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Jan 11 11:31:43 managed-node3 sudo[35850]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:44 managed-node3 python3.12[35995]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:44 managed-node3 python3.12[36128]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:45 managed-node3 python3.12[36260]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:46 managed-node3 python3.12[36392]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:46 managed-node3 sudo[36565]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yejaoueywdzgyawvvauyidvsaoeljkuw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613106.406838-14031-178920898729166/AnsiballZ_podman_image.py' Jan 11 11:31:46 managed-node3 sudo[36565]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:46 managed-node3 systemd[29953]: Started podman-36569.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 99. 
Jan 11 11:31:46 managed-node3 systemd[29953]: Started podman-36577.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 103. Jan 11 11:31:53 managed-node3 systemd[29953]: podman-36577.scope: Consumed 8.710s CPU time, 472.3M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jan 11 11:31:53 managed-node3 systemd[29953]: Started podman-36750.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jan 11 11:31:54 managed-node3 sudo[36565]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:54 managed-node3 python3.12[36888]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:54 managed-node3 python3.12[37019]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:55 managed-node3 python3.12[37124]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613114.5874217-14235-248964580093201/.source.container dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=0b6cac7929623f1059e78ef39b8b0a25169b28a6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:55 managed-node3 sudo[37297]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qjigkbqqbsumpewtjyywkklizpdscvme ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613115.4036748-14257-2932215252036/AnsiballZ_systemd.py' Jan 11 11:31:55 managed-node3 sudo[37297]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:55 managed-node3 python3.12[37300]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:55 managed-node3 systemd[29953]: Reload requested from client PID 37301 ('systemctl')... Jan 11 11:31:55 managed-node3 systemd[29953]: Reloading... Jan 11 11:31:55 managed-node3 systemd[29953]: Reloading finished in 42 ms. 
Jan 11 11:31:55 managed-node3 sudo[37297]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:56 managed-node3 sudo[37483]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jttwmojzbswtvzunudlkxxoaaojyxqds ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613116.0337892-14274-250502948281504/AnsiballZ_systemd.py' Jan 11 11:31:56 managed-node3 sudo[37483]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:31:56 managed-node3 python3.12[37486]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:56 managed-node3 systemd[29953]: Starting quadlet-basic-mysql.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 111. Jan 11 11:31:56 managed-node3 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 11 11:31:56 managed-node3 systemd[29953]: Started rootless-netns-e1dda47f.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 126. Jan 11 11:31:56 managed-node3 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 11 11:31:56 managed-node3 kernel: podman1: port 1(veth0) entered blocking state Jan 11 11:31:56 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:56 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:31:56 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:31:56 managed-node3 kernel: podman1: port 1(veth0) entered blocking state Jan 11 11:31:56 managed-node3 kernel: podman1: port 1(veth0) entered forwarding state Jan 11 11:31:56 managed-node3 systemd[29953]: Started run-p37546-i37846.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/1111/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 130. Jan 11 11:31:56 managed-node3 systemd[29953]: Started quadlet-basic-mysql.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 111. 
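The tun, bridge and veth kernel messages plus the aardvark-dns scope show rootless networking being set up for the container: podman creates the podman1 bridge inside its per-user rootless network namespace and runs aardvark-dns there for DNS on the quadlet network. One way to look inside that namespace (hypothetical; the --rootless-netns option depends on the podman version in use):

  sudo -u user_quadlet_basic XDG_RUNTIME_DIR=/run/user/1111 \
      podman unshare --rootless-netns ip addr show podman1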
Jan 11 11:31:57 managed-node3 quadlet-basic-mysql[37489]: ebf545f510b185bac07f58eac6edcfcdbaa1f2590184b5c102a917b7b2414ffc Jan 11 11:31:57 managed-node3 sudo[37483]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:31:57 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:57 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:31:57 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:31:57 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:57 managed-node3 systemd[29953]: quadlet-basic-mysql.service: Main process exited, code=exited, status=127/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit UNIT has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 127. Jan 11 11:31:57 managed-node3 systemd[29953]: quadlet-basic-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT has entered the 'failed' state with result 'exit-code'. Jan 11 11:31:57 managed-node3 python3.12[37713]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:58 managed-node3 python3.12[37845]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:00 managed-node3 python3.12[37977]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:00 managed-node3 python3.12[38109]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:02 managed-node3 python3.12[38373]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:03 managed-node3 python3.12[38510]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:04 managed-node3 python3.12[38643]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:04 managed-node3 python3.12[38775]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:07 
managed-node3 python3.12[38907]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:07 managed-node3 sudo[39082]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yvmxxhpzvvgqielvmbqcweqwdztzymmd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613127.4405448-14633-245979959966301/AnsiballZ_podman_secret.py' Jan 11 11:32:07 managed-node3 sudo[39082]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:07 managed-node3 systemd[29953]: Started podman-39086.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 134. Jan 11 11:32:07 managed-node3 sudo[39082]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:09 managed-node3 python3.12[39223]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:09 managed-node3 sudo[39398]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-csblizunbfvzmlfftxoqtdjcntsrnngp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613129.4278853-14724-51853748470117/AnsiballZ_podman_secret.py' Jan 11 11:32:09 managed-node3 sudo[39398]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:09 managed-node3 systemd[29953]: Started podman-39402.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 138. 
Jan 11 11:32:09 managed-node3 sudo[39398]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:10 managed-node3 python3.12[39540]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:11 managed-node3 python3.12[39673]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:11 managed-node3 python3.12[39805]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:12 managed-node3 python3.12[39937]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:13 managed-node3 sudo[40112]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ocxduommqxitdwnbedqsszjqaqdntcaj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613132.9935164-14872-92584878593836/AnsiballZ_systemd.py' Jan 11 11:32:13 managed-node3 sudo[40112]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:13 managed-node3 python3.12[40115]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:32:13 managed-node3 systemd[29953]: Reload requested from client PID 40118 ('systemctl')... Jan 11 11:32:13 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:13 managed-node3 systemd[29953]: Reloading finished in 42 ms. 
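quadlet-basic-mysql.service exited almost immediately with status=127, which for an ExecStart= process conventionally means the command could not be found or executed; the play then dumps the three quadlet files and stops the unit. A hypothetical follow-up for such a failure (not part of the recorded run):

  sudo -u user_quadlet_basic XDG_RUNTIME_DIR=/run/user/1111 \
      systemctl --user status quadlet-basic-mysql.service
  sudo -u user_quadlet_basic XDG_RUNTIME_DIR=/run/user/1111 \
      journalctl --user -u quadlet-basic-mysql.service --no-pager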
Jan 11 11:32:13 managed-node3 sudo[40112]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:13 managed-node3 python3.12[40259]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:14 managed-node3 python3.12[40523]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:15 managed-node3 sudo[40696]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lzwfvxkfspvoznvkuqhvovftzcydlmgw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613134.9800293-14942-55593340973778/AnsiballZ_systemd.py' Jan 11 11:32:15 managed-node3 sudo[40696]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:15 managed-node3 python3.12[40699]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:15 managed-node3 systemd[29953]: Reload requested from client PID 40700 ('systemctl')... Jan 11 11:32:15 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:15 managed-node3 systemd[29953]: Reloading finished in 43 ms. Jan 11 11:32:15 managed-node3 sudo[40696]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:15 managed-node3 sudo[40882]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-knyeodcprzbiypreezsfqbknyxrhhbsa ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613135.6275008-14959-34319500053231/AnsiballZ_command.py' Jan 11 11:32:15 managed-node3 sudo[40882]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:16 managed-node3 systemd[29953]: Started podman-40886.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 142. Jan 11 11:32:16 managed-node3 sudo[40882]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:16 managed-node3 sudo[41065]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jytjagiplhuhrzswzwaosvwhotddftkt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613136.2272286-14978-110077178975768/AnsiballZ_command.py' Jan 11 11:32:16 managed-node3 sudo[41065]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:16 managed-node3 python3.12[41068]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:16 managed-node3 systemd[29953]: Started podman-41069.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 146. Jan 11 11:32:17 managed-node3 sudo[41065]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:17 managed-node3 sudo[41249]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hllzzhqpmtcjlnistruziuasggarrark ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613137.5180597-14999-25111524180527/AnsiballZ_command.py' Jan 11 11:32:17 managed-node3 sudo[41249]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:17 managed-node3 python3.12[41252]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:17 managed-node3 systemd[29953]: Started podman-41253.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 150. Jan 11 11:32:17 managed-node3 sudo[41249]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:18 managed-node3 sudo[41433]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qrrwcybanbyozqsysrdltneidcmfbrwg ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613138.006892-15009-274072191063702/AnsiballZ_command.py' Jan 11 11:32:18 managed-node3 sudo[41433]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:18 managed-node3 python3.12[41436]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:18 managed-node3 systemd[29953]: Started podman-41437.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 154. Jan 11 11:32:18 managed-node3 sudo[41433]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:18 managed-node3 sudo[41617]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-prfczuukauwjvzfeckpiacmksfaghvgl ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613138.5093179-15026-260297364284951/AnsiballZ_command.py' Jan 11 11:32:18 managed-node3 sudo[41617]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:18 managed-node3 python3.12[41620]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:18 managed-node3 systemd[29953]: Started podman-41621.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 158. Jan 11 11:32:18 managed-node3 sudo[41617]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:19 managed-node3 sudo[41801]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nflymdojzerdynxziygvrkpgkwivzoap ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613138.9904163-15036-68605387076336/AnsiballZ_command.py' Jan 11 11:32:19 managed-node3 sudo[41801]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:19 managed-node3 python3.12[41804]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:19 managed-node3 systemd[29953]: Started podman-41805.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 162. Jan 11 11:32:19 managed-node3 sudo[41801]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:19 managed-node3 sudo[41984]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bvysutorsyzhjrzcczswcnwdctcwxmmm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613139.4753275-15053-13673547339577/AnsiballZ_command.py' Jan 11 11:32:19 managed-node3 sudo[41984]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:19 managed-node3 systemd[29953]: Started podman-41988.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 166. Jan 11 11:32:19 managed-node3 sudo[41984]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:20 managed-node3 sudo[42168]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-werphmcnftfwxfjwrpmrwysmxvarqjoc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613139.9615653-15063-209914961622125/AnsiballZ_command.py' Jan 11 11:32:20 managed-node3 sudo[42168]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:20 managed-node3 systemd[29953]: Started podman-42172.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 170. 
Jan 11 11:32:20 managed-node3 sudo[42168]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:20 managed-node3 sudo[42351]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ccwgrqxgrooemqnkxghvghzdldenxrwd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613140.447228-15080-276444025678020/AnsiballZ_service_facts.py' Jan 11 11:32:20 managed-node3 sudo[42351]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:20 managed-node3 python3.12[42354]: ansible-service_facts Invoked Jan 11 11:32:23 managed-node3 sudo[42351]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:24 managed-node3 python3.12[42594]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:24 managed-node3 python3.12[42727]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:25 managed-node3 python3.12[42859]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:26 managed-node3 python3.12[42991]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:26 managed-node3 sudo[43166]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qzaetfbsqafgfwxudllfxfsbltawrfpy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613146.1629307-15178-5179065352967/AnsiballZ_systemd.py' Jan 11 11:32:26 managed-node3 sudo[43166]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:26 managed-node3 python3.12[43169]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:32:26 managed-node3 systemd[29953]: Reload requested from client PID 43172 ('systemctl')... Jan 11 11:32:26 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:26 managed-node3 systemd[29953]: Reloading finished in 41 ms. Jan 11 11:32:26 managed-node3 systemd[29953]: Stopped quadlet-basic-unused-volume-volume.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 174 and the job result is done. 
Jan 11 11:32:26 managed-node3 sudo[43166]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:27 managed-node3 python3.12[43313]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:28 managed-node3 python3.12[43577]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:28 managed-node3 sudo[43750]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oswyiuzqfslukkvmebhjffiawnuhugzr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613148.2279422-15237-77436776759072/AnsiballZ_systemd.py' Jan 11 11:32:28 managed-node3 sudo[43750]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:28 managed-node3 python3.12[43753]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:28 managed-node3 systemd[29953]: Reload requested from client PID 43754 ('systemctl')... Jan 11 11:32:28 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:28 managed-node3 systemd[29953]: Reloading finished in 40 ms. Jan 11 11:32:28 managed-node3 sudo[43750]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:29 managed-node3 sudo[43937]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhztpzncvtnsrzovwxczjzxypbzucjus ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613148.8688755-15254-224558205393549/AnsiballZ_command.py' Jan 11 11:32:29 managed-node3 sudo[43937]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:29 managed-node3 systemd[29953]: Started podman-43941.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 175. Jan 11 11:32:29 managed-node3 sudo[43937]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:29 managed-node3 sudo[44120]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kinrlbhhhnqykpfwqdwhrdmzreljcnkz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613149.4480538-15266-184365846934705/AnsiballZ_command.py' Jan 11 11:32:29 managed-node3 sudo[44120]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:29 managed-node3 python3.12[44123]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:29 managed-node3 systemd[29953]: Started podman-44124.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 179. Jan 11 11:32:29 managed-node3 sudo[44120]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:30 managed-node3 sudo[44304]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ecscxesocxjbaijxnekgjkphcjppkpdo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613150.1066644-15287-153720165973771/AnsiballZ_command.py' Jan 11 11:32:30 managed-node3 sudo[44304]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:30 managed-node3 python3.12[44307]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:30 managed-node3 systemd[29953]: Started podman-44308.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 183. Jan 11 11:32:30 managed-node3 sudo[44304]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:30 managed-node3 sudo[44487]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mvpbtgaswrldrpfabhuaugzuoxjqiaao ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613150.590966-15297-118183946335853/AnsiballZ_command.py' Jan 11 11:32:30 managed-node3 sudo[44487]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:30 managed-node3 python3.12[44490]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:30 managed-node3 systemd[29953]: Started podman-44491.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 187. Jan 11 11:32:30 managed-node3 sudo[44487]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:31 managed-node3 sudo[44672]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-phkkkwrciwvimaxhqpwqlwlllokahmst ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613151.1694026-15317-107071091083711/AnsiballZ_command.py' Jan 11 11:32:31 managed-node3 sudo[44672]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:31 managed-node3 python3.12[44675]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:31 managed-node3 systemd[29953]: Started podman-44676.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 191. Jan 11 11:32:31 managed-node3 sudo[44672]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:31 managed-node3 sudo[44856]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-frdygsuahlrtmtbqwbnzpycnjdnnsild ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613151.6694036-15339-10608296490480/AnsiballZ_command.py' Jan 11 11:32:31 managed-node3 sudo[44856]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:31 managed-node3 python3.12[44859]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:32 managed-node3 systemd[29953]: Started podman-44860.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 195. Jan 11 11:32:32 managed-node3 sudo[44856]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:32 managed-node3 sudo[45040]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-emywwtgfcenjejbogbtikyfhyrjwbqdh ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613152.1738348-15365-51371937836539/AnsiballZ_command.py' Jan 11 11:32:32 managed-node3 sudo[45040]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:32 managed-node3 systemd[29953]: Started podman-45044.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 199. Jan 11 11:32:32 managed-node3 sudo[45040]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:32 managed-node3 sudo[45223]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-luwujmvgrbrtbtmrsvxxyhzbzuojsplx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613152.6817553-15384-262447572271742/AnsiballZ_command.py' Jan 11 11:32:32 managed-node3 sudo[45223]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:33 managed-node3 systemd[29953]: Started podman-45227.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 203. 
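Every entry in this stretch follows the same execution pattern: the controller becomes the rootless user (uid 1111) over sudo and exports XDG_RUNTIME_DIR=/run/user/1111 so that systemctl and podman talk to that user's own systemd instance and container storage rather than the system ones. A minimal sketch of a task that would produce the user-scope daemon-reload seen above (an illustration of the pattern, not the role's actual task):

    - name: Reload the user's systemd instance after editing quadlet files
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
      ansible.builtin.systemd:
        scope: user
        daemon_reload: true

Without the XDG_RUNTIME_DIR export the systemd module could not reach the user bus, which is why every sudo line in the journal carries it explicitly.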
Jan 11 11:32:33 managed-node3 sudo[45223]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:33 managed-node3 sudo[45406]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gepcymxvzluuukkrdekutwcirittacyp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613153.1890347-15409-206095138219026/AnsiballZ_service_facts.py' Jan 11 11:32:33 managed-node3 sudo[45406]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:33 managed-node3 python3.12[45409]: ansible-service_facts Invoked Jan 11 11:32:36 managed-node3 sudo[45406]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:36 managed-node3 python3.12[45649]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:37 managed-node3 python3.12[45782]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:37 managed-node3 python3.12[45914]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:38 managed-node3 python3.12[46046]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:39 managed-node3 sudo[46221]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ygjkadouzrsfmsuusvdxznhlliradfmz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613158.8254309-15535-31899405719367/AnsiballZ_systemd.py' Jan 11 11:32:39 managed-node3 sudo[46221]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:39 managed-node3 python3.12[46224]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:32:39 managed-node3 systemd[29953]: Reload requested from client PID 46227 ('systemctl')... Jan 11 11:32:39 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:39 managed-node3 systemd[29953]: Reloading finished in 40 ms. Jan 11 11:32:39 managed-node3 systemd[29953]: Stopped quadlet-basic-mysql-volume.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 207 and the job result is done. 
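Before touching anything owned by the rootless user, the run re-checks that subordinate ID ranges are configured: it stats /usr/bin/getsubids and then queries both the subuid and subgid allocations for user_quadlet_basic, as the two getsubids invocations above show. A hedged sketch of that verification (task names and the changed_when setting are illustrative):

    - name: Verify the user has a subuid range
      ansible.builtin.command: getsubids user_quadlet_basic
      changed_when: false

    - name: Verify the user has a subgid range
      ansible.builtin.command: getsubids -g user_quadlet_basic
      changed_when: false

Rootless podman needs both ranges to map container UIDs and GIDs, so a failure here would make the later quadlet operations pointless.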
Jan 11 11:32:39 managed-node3 sudo[46221]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:39 managed-node3 python3.12[46368]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:40 managed-node3 python3.12[46632]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:41 managed-node3 sudo[46805]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nlqbazwakvjstpzbmlxkgcnsprxzfjnf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613160.9532418-15612-158023406770613/AnsiballZ_systemd.py' Jan 11 11:32:41 managed-node3 sudo[46805]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:41 managed-node3 python3.12[46808]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:41 managed-node3 systemd[29953]: Reload requested from client PID 46809 ('systemctl')... Jan 11 11:32:41 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:41 managed-node3 systemd[29953]: Reloading finished in 41 ms. Jan 11 11:32:41 managed-node3 sudo[46805]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:41 managed-node3 sudo[46991]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-olvqqqlgujiakgkdrnxdvzcphlqhcrrq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613161.6072545-15629-194620579547675/AnsiballZ_command.py' Jan 11 11:32:41 managed-node3 sudo[46991]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:41 managed-node3 systemd[29953]: Started podman-46995.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 208. Jan 11 11:32:42 managed-node3 sudo[46991]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:42 managed-node3 sudo[47175]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ntdbeeglzlxijrndxzcjpaoitaxuiuzp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613162.2185133-15648-214242175966943/AnsiballZ_command.py' Jan 11 11:32:42 managed-node3 sudo[47175]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:42 managed-node3 python3.12[47178]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:42 managed-node3 systemd[29953]: Started podman-47179.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 212. Jan 11 11:32:42 managed-node3 sudo[47175]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:43 managed-node3 sudo[47359]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eenwafrenrddxfbyrebthlmheplertmy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613162.8959327-15662-100119550112663/AnsiballZ_command.py' Jan 11 11:32:43 managed-node3 sudo[47359]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:43 managed-node3 python3.12[47362]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:43 managed-node3 systemd[29953]: Started podman-47363.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 216. Jan 11 11:32:43 managed-node3 sudo[47359]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:43 managed-node3 sudo[47542]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vmfjrvkpjoqkyjpepvoauwirvdwbtmov ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613163.3848853-15679-126353248656444/AnsiballZ_command.py' Jan 11 11:32:43 managed-node3 sudo[47542]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:43 managed-node3 python3.12[47545]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:43 managed-node3 systemd[29953]: Started podman-47546.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 220. Jan 11 11:32:43 managed-node3 sudo[47542]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:44 managed-node3 sudo[47725]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bsxybzzyhbafngjkgkaimpsnjracquul ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613163.8730524-15689-240419964070751/AnsiballZ_command.py' Jan 11 11:32:44 managed-node3 sudo[47725]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:44 managed-node3 python3.12[47728]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:44 managed-node3 systemd[29953]: Started podman-47729.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 224. Jan 11 11:32:44 managed-node3 sudo[47725]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:44 managed-node3 sudo[47909]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sgxohzvnyrevubejyssguuqdajxurapj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613164.3751202-15706-134908010713081/AnsiballZ_command.py' Jan 11 11:32:44 managed-node3 sudo[47909]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:44 managed-node3 python3.12[47912]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:44 managed-node3 systemd[29953]: Started podman-47913.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 228. Jan 11 11:32:45 managed-node3 sudo[47909]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:46 managed-node3 sudo[48092]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kyyaodyviumlxgwqmfzpbsowdkmqwyes ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613165.923966-15730-29888098965952/AnsiballZ_command.py' Jan 11 11:32:46 managed-node3 sudo[48092]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:46 managed-node3 systemd[29953]: Started podman-48096.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 232. Jan 11 11:32:46 managed-node3 sudo[48092]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:46 managed-node3 sudo[48276]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggzunytuwzdiqomnjnnlhuniaqzoeecx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613166.4153993-15747-220401453367475/AnsiballZ_command.py' Jan 11 11:32:46 managed-node3 sudo[48276]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:46 managed-node3 systemd[29953]: Started podman-48280.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 236. 
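The burst of short-lived podman-*.scope units above corresponds to a fixed battery of cleanup-and-inspection commands that runs after each quadlet is removed: prune all images, then list remaining images, volumes, containers, and networks. Assuming it were driven by a simple loop (the real role may issue these as separate tasks), a sketch would be:

    - name: Prune images and list leftover podman resources for the user
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
      ansible.builtin.command: "{{ item }}"
      loop:
        - podman image prune --all -f
        - podman images -n
        - podman volume ls -n
        - podman ps --noheading
        - podman network ls -n -q
      changed_when: false

Because each command runs in the user scope, systemd creates a transient scope per invocation, which is why the journal shows a new podman-NNNNN.scope for every single listing.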
Jan 11 11:32:46 managed-node3 sudo[48276]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:47 managed-node3 sudo[48461]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pcxnlefzejfqdvcymkgbqnehzjtxjigc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613166.901463-15757-108990047623750/AnsiballZ_service_facts.py' Jan 11 11:32:47 managed-node3 sudo[48461]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:47 managed-node3 python3.12[48464]: ansible-service_facts Invoked Jan 11 11:32:50 managed-node3 sudo[48461]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:51 managed-node3 python3.12[48704]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:51 managed-node3 python3.12[48837]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:52 managed-node3 python3.12[48969]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:53 managed-node3 python3.12[49101]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:53 managed-node3 sudo[49276]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-evgxfoyxiwbqdqempbsppcroxpnfhlxw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613173.5064712-15869-18986601889721/AnsiballZ_systemd.py' Jan 11 11:32:53 managed-node3 sudo[49276]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:53 managed-node3 python3.12[49279]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:32:53 managed-node3 systemd[29953]: Reload requested from client PID 49282 ('systemctl')... Jan 11 11:32:53 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:54 managed-node3 systemd[29953]: Reloading finished in 39 ms. Jan 11 11:32:54 managed-node3 systemd[29953]: Stopped quadlet-basic-unused-network-network.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 240 and the job result is done. 
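The ansible-service_facts call above, again wrapped in sudo with XDG_RUNTIME_DIR set, snapshots unit state so the test can assert that the quadlet-generated services are really gone. A minimal sketch of that gathering step, with the become wrapper shown explicitly (whether user-scope units are visible in the result depends on the environment; this is an illustration only):

    - name: Gather service facts in the rootless user's context
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
      ansible.builtin.service_facts:

The results land in ansible_facts.services, which later assertions can inspect for entries such as quadlet-basic-unused-network-network.service.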
Jan 11 11:32:54 managed-node3 sudo[49276]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:54 managed-node3 python3.12[49424]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:55 managed-node3 python3.12[49688]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:55 managed-node3 sudo[49861]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gttdsfiezuikxpvcvetqjlvxxupkwbsa ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613175.5939744-15921-40852223920198/AnsiballZ_systemd.py' Jan 11 11:32:55 managed-node3 sudo[49861]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:56 managed-node3 python3.12[49864]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:56 managed-node3 systemd[29953]: Reload requested from client PID 49865 ('systemctl')... Jan 11 11:32:56 managed-node3 systemd[29953]: Reloading... Jan 11 11:32:56 managed-node3 systemd[29953]: Reloading finished in 38 ms. Jan 11 11:32:56 managed-node3 sudo[49861]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:56 managed-node3 sudo[50047]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sjpvqiylotnhfwaghjphbqbbrsokfson ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613176.2481945-15942-28777684928469/AnsiballZ_command.py' Jan 11 11:32:56 managed-node3 sudo[50047]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:56 managed-node3 systemd[29953]: Started podman-50051.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 241. Jan 11 11:32:56 managed-node3 sudo[50047]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:57 managed-node3 sudo[50232]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mfbdcyqongrzwyenglsyzsvowblforje ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613176.8512917-15964-59712057301008/AnsiballZ_command.py' Jan 11 11:32:57 managed-node3 sudo[50232]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:57 managed-node3 python3.12[50235]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:57 managed-node3 systemd[29953]: Started podman-50236.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 245. Jan 11 11:32:57 managed-node3 sudo[50232]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:57 managed-node3 sudo[50416]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tdgbjwxiyttiuyordlblnyombdfqxyjm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613177.5222197-15978-100401395843130/AnsiballZ_command.py' Jan 11 11:32:57 managed-node3 sudo[50416]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:57 managed-node3 systemd[4347]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 11 11:32:57 managed-node3 systemd[4347]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 11 11:32:57 managed-node3 python3.12[50419]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:57 managed-node3 systemd[4347]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 11 11:32:57 managed-node3 systemd[29953]: Started podman-50421.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 249. Jan 11 11:32:57 managed-node3 sudo[50416]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:58 managed-node3 sudo[50601]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zfteeifopenivhxazgitjcrcqfmyksel ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613178.0105646-15995-149678124235006/AnsiballZ_command.py' Jan 11 11:32:58 managed-node3 sudo[50601]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:58 managed-node3 python3.12[50604]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:58 managed-node3 systemd[29953]: Started podman-50605.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 253. 
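Entries like these repeat a three-step teardown for each quadlet unit: stop (and disable) the generated service in the user scope, delete the source file under ~/.config/containers/systemd, then reload the user daemon so the generated unit vanishes. A condensed sketch for the unused network quadlet, using the exact paths from this journal (names and layout are illustrative, not the role's actual tasks):

    - name: Stop the generated network service
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
      ansible.builtin.systemd:
        name: quadlet-basic-unused-network-network.service
        scope: user
        state: stopped
        enabled: false
        force: true

    - name: Remove the quadlet source file
      ansible.builtin.file:
        path: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network
        state: absent

    - name: Reload the user daemon so the generated unit disappears
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
      ansible.builtin.systemd:
        scope: user
        daemon_reload: true

The systemd-tmpfiles-clean entries interleaved above appear to come from a different user manager (PID 4347) whose cleanup timer happened to fire, not from the test itself.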
Jan 11 11:32:58 managed-node3 sudo[50601]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:58 managed-node3 sudo[50785]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-egqdyqznzcadumsewivtsjrjifumgmnr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613178.4993725-16005-158070925322928/AnsiballZ_command.py' Jan 11 11:32:58 managed-node3 sudo[50785]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:58 managed-node3 python3.12[50788]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:58 managed-node3 systemd[29953]: Started podman-50789.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 257. Jan 11 11:32:58 managed-node3 sudo[50785]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:59 managed-node3 sudo[50968]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vcjkgnbljfkvojmkcfpyyzcplefoapjr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613178.9969528-16022-275256960740328/AnsiballZ_command.py' Jan 11 11:32:59 managed-node3 sudo[50968]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:59 managed-node3 python3.12[50971]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:59 managed-node3 systemd[29953]: Started podman-50972.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 261. Jan 11 11:32:59 managed-node3 sudo[50968]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:32:59 managed-node3 sudo[51151]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tffpdxsyygnrqjkhtigzdfnjhcvrldhj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613179.4939308-16032-34418897922330/AnsiballZ_command.py' Jan 11 11:32:59 managed-node3 sudo[51151]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:32:59 managed-node3 systemd[29953]: Started podman-51155.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 265. 
Jan 11 11:32:59 managed-node3 sudo[51151]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:00 managed-node3 sudo[51334]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fscctwpcinzpwlfpwctcegonfrkqcjmj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613180.0506632-16049-266233763334283/AnsiballZ_command.py' Jan 11 11:33:00 managed-node3 sudo[51334]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:00 managed-node3 systemd[29953]: Started podman-51338.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 269. Jan 11 11:33:00 managed-node3 sudo[51334]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:00 managed-node3 sudo[51519]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-udsrymbuyabvtxkxqugobfychaijeliv ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613180.5378177-16059-81830683970881/AnsiballZ_service_facts.py' Jan 11 11:33:00 managed-node3 sudo[51519]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:00 managed-node3 python3.12[51522]: ansible-service_facts Invoked Jan 11 11:33:03 managed-node3 sudo[51519]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:04 managed-node3 python3.12[51762]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:04 managed-node3 python3.12[51895]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:04 managed-node3 python3.12[52027]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:06 managed-node3 python3.12[52159]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:06 managed-node3 sudo[52334]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ldjuwyprxzfpwyfaxyjppbmidyuugedw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613186.1550627-16171-92779059159981/AnsiballZ_systemd.py' Jan 11 11:33:06 managed-node3 sudo[52334]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:06 managed-node3 python3.12[52337]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:33:06 managed-node3 systemd[29953]: Reload requested from client PID 52340 ('systemctl')... Jan 11 11:33:06 managed-node3 systemd[29953]: Reloading... Jan 11 11:33:06 managed-node3 systemd[29953]: Reloading finished in 38 ms. 
Jan 11 11:33:06 managed-node3 systemd[29953]: Stopped quadlet-basic-network.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 273 and the job result is done. Jan 11 11:33:06 managed-node3 sudo[52334]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:07 managed-node3 python3.12[52482]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:08 managed-node3 python3.12[52746]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:08 managed-node3 sudo[52919]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uagpywdhzgpfkgpmzzkbwklhzdowvpgn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613188.3371105-16230-126811755623597/AnsiballZ_systemd.py' Jan 11 11:33:08 managed-node3 sudo[52919]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:08 managed-node3 python3.12[52922]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:33:08 managed-node3 systemd[29953]: Reload requested from client PID 52923 ('systemctl')... Jan 11 11:33:08 managed-node3 systemd[29953]: Reloading... Jan 11 11:33:08 managed-node3 systemd[29953]: Reloading finished in 38 ms. Jan 11 11:33:08 managed-node3 sudo[52919]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:09 managed-node3 sudo[53105]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ipdrbmdtumurrrbmvwiajmaarzgvepjs ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613188.9794345-16240-232689960246747/AnsiballZ_command.py' Jan 11 11:33:09 managed-node3 sudo[53105]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:09 managed-node3 systemd[29953]: Started podman-53109.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 274. 
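Note the naming rule visible in these entries: a quadlet file called quadlet-basic.network produces a service named quadlet-basic-network.service (the file suffix is folded into the unit name with a dash), just as quadlet-basic-mysql.volume earlier produced quadlet-basic-mysql-volume.service. If a playbook needs to compute the generated name from the file name, a small illustrative sketch (the filter chain is an assumption, not taken from the role) is:

    - name: Derive the generated service name from a quadlet file name
      ansible.builtin.set_fact:
        __quadlet_service: "{{ 'quadlet-basic.network' | regex_replace('[.]network$', '-network.service') }}"

With that fact in hand, the same stop/remove/reload sequence shown earlier can be applied uniformly to .network and .volume files.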
Jan 11 11:33:09 managed-node3 sudo[53105]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:09 managed-node3 sudo[53289]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aqejbavkjycnkiyqqillxjmdqurikwbm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613189.5653348-16259-178661027946329/AnsiballZ_command.py' Jan 11 11:33:09 managed-node3 sudo[53289]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:09 managed-node3 python3.12[53292]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:09 managed-node3 systemd[29953]: Started podman-53293.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 278. Jan 11 11:33:09 managed-node3 sudo[53289]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:10 managed-node3 sudo[53474]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kblqseblugdxumuavemjcnhlsjpiucdc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613190.2663293-16279-228207363507580/AnsiballZ_command.py' Jan 11 11:33:10 managed-node3 sudo[53474]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:10 managed-node3 python3.12[53477]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:10 managed-node3 systemd[29953]: Started podman-53478.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 282. Jan 11 11:33:10 managed-node3 sudo[53474]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:10 managed-node3 sudo[53658]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cvgpcsjxlblvmadnccrmzffcfqcdfian ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613190.7590194-16290-275426166329867/AnsiballZ_command.py' Jan 11 11:33:10 managed-node3 sudo[53658]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:11 managed-node3 python3.12[53661]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:11 managed-node3 systemd[29953]: Started podman-53662.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 286. 
Jan 11 11:33:11 managed-node3 sudo[53658]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:11 managed-node3 sudo[53842]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yupaxvwblgltssqbecalhawuglbxilox ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613191.248186-16300-13154183487945/AnsiballZ_command.py' Jan 11 11:33:11 managed-node3 sudo[53842]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:11 managed-node3 python3.12[53845]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:11 managed-node3 systemd[29953]: Started podman-53846.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 290. Jan 11 11:33:11 managed-node3 sudo[53842]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:11 managed-node3 sudo[54025]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-smzfeudlvapqwxkgpacsggbfyaedflqd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613191.7495422-16317-244322107034894/AnsiballZ_command.py' Jan 11 11:33:11 managed-node3 sudo[54025]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:12 managed-node3 python3.12[54028]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:12 managed-node3 systemd[29953]: Started podman-54029.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 294. Jan 11 11:33:12 managed-node3 sudo[54025]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:12 managed-node3 sudo[54208]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ywdnanuftawliasgrdtqjocctmttgiug ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613192.2355778-16327-272190748778280/AnsiballZ_command.py' Jan 11 11:33:12 managed-node3 sudo[54208]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:12 managed-node3 systemd[29953]: Started podman-54212.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 298. 
Jan 11 11:33:12 managed-node3 sudo[54208]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:12 managed-node3 sudo[54392]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yffewrjjxmcmqzexvgdkeswjccyikcvq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613192.7367122-16352-220457878244959/AnsiballZ_command.py' Jan 11 11:33:12 managed-node3 sudo[54392]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:13 managed-node3 systemd[29953]: Started podman-54396.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 302. Jan 11 11:33:13 managed-node3 sudo[54392]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:13 managed-node3 sudo[54577]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tzrizccgbonxgktzuibkpvpcamseyvcz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613193.2396564-16370-163125541590530/AnsiballZ_service_facts.py' Jan 11 11:33:13 managed-node3 sudo[54577]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:13 managed-node3 python3.12[54580]: ansible-service_facts Invoked Jan 11 11:33:15 managed-node3 sudo[54577]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:15 managed-node3 python3.12[54820]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:16 managed-node3 sudo[54995]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lpwpxzcewnzrzdjjbahcijherplhozgw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613196.1108353-16461-66317049932168/AnsiballZ_podman_container_info.py' Jan 11 11:33:16 managed-node3 sudo[54995]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:16 managed-node3 python3.12[54998]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Jan 11 11:33:16 managed-node3 systemd[29953]: Started podman-54999.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 306. 
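As a final check the play queries podman itself for any containers still known to the rootless user, using the containers.podman.podman_container_info module with no name filter so it returns everything. A sketch of that verification (the register name is an assumption of this illustration):

    - name: Collect remaining container info for the rootless user
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
      containers.podman.podman_container_info:
      register: __podman_containers

An empty containers list here, together with the empty network and secret listings that follow, indicates a clean slate before the user session is torn down.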
Jan 11 11:33:16 managed-node3 sudo[54995]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:16 managed-node3 sudo[55178]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hcoumwfkcpdyzhlkkblacyounsqvojbc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613196.746958-16491-29644121397386/AnsiballZ_command.py' Jan 11 11:33:16 managed-node3 sudo[55178]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:17 managed-node3 python3.12[55181]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:17 managed-node3 systemd[29953]: Started podman-55182.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 310. Jan 11 11:33:17 managed-node3 sudo[55178]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:17 managed-node3 sudo[55363]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-logbuefdyxfzdjpetmahsedrtciglngz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613197.281544-16508-135824060290071/AnsiballZ_command.py' Jan 11 11:33:17 managed-node3 sudo[55363]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 11 11:33:17 managed-node3 python3.12[55366]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:17 managed-node3 systemd[29953]: Started podman-55367.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 314. Jan 11 11:33:17 managed-node3 sudo[55363]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 11 11:33:18 managed-node3 python3.12[55505]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl disable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Jan 11 11:33:18 managed-node3 systemd[1]: Stopping user@1111.service - User Manager for UID 1111... ░░ Subject: A stop job for unit user@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@1111.service has begun execution. ░░ ░░ The job identifier is 1722. Jan 11 11:33:18 managed-node3 systemd[29953]: Activating special unit exit.target... Jan 11 11:33:18 managed-node3 systemd[29953]: Stopping podman-pause-60dc1641.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 334. 
Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 329 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 325 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 328 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 330 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 332 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 335 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 339 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 336 and the job result is done. Jan 11 11:33:18 managed-node3 dbus-broker[30307]: Dispatched 4411 messages @ 2(±17)μs / message. ░░ Subject: Dispatched 4411 messages ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ This message is printed by dbus-broker when shutting down. It includes metric ░░ information collected during the runtime of dbus-broker. ░░ ░░ The message lists the number of dispatched messages ░░ (in this case 4411) as well as the mean time to ░░ handling a single message. The time measurements exclude the time spent on ░░ writing to and reading from the kernel. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopping dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. 
░░ ░░ The job identifier is 327. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 324 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped podman-pause-60dc1641.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 334 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Removed slice user.slice - Slice /user. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 333 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: user.slice: Consumed 8.951s CPU time, 472.3M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jan 11 11:33:18 managed-node3 systemd[29953]: Stopped dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 327 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Removed slice session.slice - User Core Session Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 331 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 326 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 322 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[29953]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 321. Jan 11 11:33:18 managed-node3 systemd[29953]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 319. Jan 11 11:33:18 managed-node3 systemd[29953]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 318. 
Jan 11 11:33:18 managed-node3 systemd[1]: user@1111.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@1111.service has successfully entered the 'dead' state. Jan 11 11:33:18 managed-node3 systemd[1]: Stopped user@1111.service - User Manager for UID 1111. ░░ Subject: A stop job for unit user@1111.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@1111.service has finished. ░░ ░░ The job identifier is 1722 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[1]: user@1111.service: Consumed 10.633s CPU time, 478.7M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@1111.service completed and consumed the indicated resources. Jan 11 11:33:18 managed-node3 systemd[1]: Stopping user-runtime-dir@1111.service - User Runtime Directory /run/user/1111... ░░ Subject: A stop job for unit user-runtime-dir@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@1111.service has begun execution. ░░ ░░ The job identifier is 1721. Jan 11 11:33:18 managed-node3 systemd[1]: run-user-1111.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-1111.mount has successfully entered the 'dead' state. Jan 11 11:33:18 managed-node3 systemd[1]: user-runtime-dir@1111.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@1111.service has successfully entered the 'dead' state. Jan 11 11:33:18 managed-node3 systemd[1]: Stopped user-runtime-dir@1111.service - User Runtime Directory /run/user/1111. ░░ Subject: A stop job for unit user-runtime-dir@1111.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@1111.service has finished. ░░ ░░ The job identifier is 1721 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[1]: Removed slice user-1111.slice - User Slice of UID 1111. ░░ Subject: A stop job for unit user-1111.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-1111.slice has finished. ░░ ░░ The job identifier is 1723 and the job result is done. Jan 11 11:33:18 managed-node3 systemd[1]: user-1111.slice: Consumed 10.662s CPU time, 478.7M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-1111.slice completed and consumed the indicated resources. Jan 11 11:33:18 managed-node3 systemd-logind[657]: Removed session 6. ░░ Subject: Session 6 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 6 has been terminated. 
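The long cascade above is the expected effect of a single step: once the play runs loginctl disable-linger for user_quadlet_basic, systemd stops user@1111.service, unmounts /run/user/1111, and removes user-1111.slice, taking the per-user podman pause scope and D-Bus broker with it. A minimal sketch of the triggering task, mirroring the removes guard visible in the journal:

    - name: Cancel lingering so the user manager shuts down
      ansible.builtin.command:
        cmd: loginctl disable-linger user_quadlet_basic
        removes: /var/lib/systemd/linger/user_quadlet_basic

The removes argument makes the task a no-op when the linger marker file is already gone, which keeps the cleanup idempotent on reruns.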
Jan 11 11:33:18 managed-node3 python3.12[55642]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:19 managed-node3 python3.12[55774]: ansible-ansible.legacy.systemd Invoked with name=systemd-logind state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Jan 11 11:33:19 managed-node3 systemd[1]: Stopping systemd-logind.service - User Login Management... ░░ Subject: A stop job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 1725. Jan 11 11:33:19 managed-node3 systemd[1]: systemd-logind.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-logind.service has successfully entered the 'dead' state. Jan 11 11:33:19 managed-node3 systemd[1]: Stopped systemd-logind.service - User Login Management. ░░ Subject: A stop job for unit systemd-logind.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has finished. ░░ ░░ The job identifier is 1725 and the job result is done. Jan 11 11:33:19 managed-node3 python3.12[55920]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:19 managed-node3 systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm... ░░ Subject: A start job for unit modprobe@drm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has begun execution. ░░ ░░ The job identifier is 1806. Jan 11 11:33:19 managed-node3 systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Jan 11 11:33:19 managed-node3 systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm. ░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 1806. Jan 11 11:33:19 managed-node3 systemd[1]: Starting systemd-logind.service - User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 1726. Jan 11 11:33:19 managed-node3 systemd-logind[55924]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. 
Jan 11 11:33:19 managed-node3 systemd-logind[55924]: Watching system buttons on /dev/input/event0 (Power Button) Jan 11 11:33:19 managed-node3 systemd-logind[55924]: Watching system buttons on /dev/input/event1 (Sleep Button) Jan 11 11:33:19 managed-node3 systemd-logind[55924]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Jan 11 11:33:19 managed-node3 systemd[1]: Started systemd-logind.service - User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 1726. Jan 11 11:33:20 managed-node3 python3.12[56061]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:22 managed-node3 python3.12[56323]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:23 managed-node3 python3.12[56460]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 11 11:33:24 managed-node3 python3.12[56592]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:27 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:27 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:28 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopping session-3.scope - Session 3 of User root... ░░ Subject: A stop job for unit session-3.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-3.scope has begun execution. ░░ ░░ The job identifier is 1888. Jan 11 11:33:29 managed-node3 sshd-session[4394]: error: mm_reap: preauth child terminated by signal 15 Jan 11 11:33:29 managed-node3 systemd[1]: Stopping session-5.scope - Session 5 of User root... ░░ Subject: A stop job for unit session-5.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-5.scope has begun execution. ░░ ░░ The job identifier is 1889. 
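The module invocations recorded above (loginctl show-user before and after an ansible.legacy.systemd call that stops systemd-logind, followed by a stat of /var/lib/systemd/linger/user_quadlet_basic) can be read back into playbook form. A minimal reconstruction is sketched here; only the commands and paths come from the log, the task names and surrounding logic are assumptions.

# Sketch reconstructed from the logged module calls; names and ordering are illustrative.
- name: Check the login state of the test user
  ansible.builtin.command: loginctl show-user --value -p State user_quadlet_basic
  register: __state
  changed_when: false
  failed_when: false

- name: Stop systemd-logind to flush stale session records
  ansible.builtin.systemd:
    name: systemd-logind
    state: stopped

- name: Check that no linger marker remains for the user
  ansible.builtin.stat:
    path: /var/lib/systemd/linger/user_quadlet_basic
  register: __linger

Stopping systemd-logind is only a momentary disruption because the service is bus-activated; the next D-Bus request starts it again, which is why the journal immediately shows a new logind instance registering seat0 and fresh button watches.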
Jan 11 11:33:29 managed-node3 sshd-session[6522]: error: mm_reap: preauth child terminated by signal 15 Jan 11 11:33:29 managed-node3 sshd-session[4394]: pam_systemd(sshd:session): Failed to release session: No session '3' known Jan 11 11:33:29 managed-node3 sshd-session[4394]: pam_unix(sshd:session): session closed for user root Jan 11 11:33:29 managed-node3 sshd-session[6522]: pam_systemd(sshd:session): Failed to release session: No session '5' known Jan 11 11:33:29 managed-node3 sshd-session[6522]: pam_unix(sshd:session): session closed for user root Jan 11 11:33:29 managed-node3 systemd[1]: session-3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopped session-3.scope - Session 3 of User root. ░░ Subject: A stop job for unit session-3.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-3.scope has finished. ░░ ░░ The job identifier is 1888 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: session-3.scope: Consumed 3.158s CPU time, 86.2M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope completed and consumed the indicated resources. Jan 11 11:33:29 managed-node3 systemd[1]: session-5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-5.scope has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopped session-5.scope - Session 5 of User root. ░░ Subject: A stop job for unit session-5.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-5.scope has finished. ░░ ░░ The job identifier is 1889 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: session-5.scope: Consumed 2min 17.198s CPU time, 389.5M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-5.scope completed and consumed the indicated resources. Jan 11 11:33:29 managed-node3 systemd[1]: Stopping user@0.service - User Manager for UID 0... ░░ Subject: A stop job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1890. Jan 11 11:33:29 managed-node3 systemd[4347]: Activating special unit exit.target... Jan 11 11:33:29 managed-node3 systemd[4347]: Removed slice background.slice - User Background Tasks Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 21 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 27 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target basic.target - Basic System. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 29 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 23 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 33 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 31 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 24 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 32 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 26 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 34 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[4347]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 11 11:33:29 managed-node3 systemd[4347]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Jan 11 11:33:29 managed-node3 systemd[4347]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 17. Jan 11 11:33:29 managed-node3 systemd[1]: user@0.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@0.service has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopped user@0.service - User Manager for UID 0. ░░ Subject: A stop job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has finished. ░░ ░░ The job identifier is 1890 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: Stopping user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A stop job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 1887. Jan 11 11:33:29 managed-node3 systemd[1]: run-user-0.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-0.mount has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: user-runtime-dir@0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@0.service has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node3 systemd[1]: Stopped user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A stop job for unit user-runtime-dir@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has finished. ░░ ░░ The job identifier is 1887 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: Removed slice user-0.slice - User Slice of UID 0. ░░ Subject: A stop job for unit user-0.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-0.slice has finished. ░░ ░░ The job identifier is 1891 and the job result is done. Jan 11 11:33:29 managed-node3 systemd[1]: user-0.slice: Consumed 2min 20.700s CPU time, 455M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-0.slice completed and consumed the indicated resources. Jan 11 11:33:29 managed-node3 sshd-session[56896]: Accepted publickey for root from 10.31.14.128 port 56984 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Jan 11 11:33:29 managed-node3 systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 1894. Jan 11 11:33:29 managed-node3 systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 1893. Jan 11 11:33:29 managed-node3 systemd-logind[55924]: New session 7 of user root. 
░░ Subject: A new session 7 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 7 has been created for the user root. ░░ ░░ The leading process of the session is 56896. Jan 11 11:33:29 managed-node3 systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 1893. Jan 11 11:33:29 managed-node3 systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1973. Jan 11 11:33:29 managed-node3 systemd-logind[55924]: New session 8 of user root. ░░ Subject: A new session 8 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 8 has been created for the user root. ░░ ░░ The leading process of the session is 56904. Jan 11 11:33:29 managed-node3 (systemd)[56904]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:33:29 managed-node3 systemd[56904]: Queued start job for default target default.target. Jan 11 11:33:29 managed-node3 systemd[56904]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 11 11:33:29 managed-node3 systemd[56904]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 11 11:33:29 managed-node3 systemd[56904]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 11 11:33:29 managed-node3 systemd[56904]: Starting dbus.socket - D-Bus User Message Bus Socket... 
░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Jan 11 11:33:29 managed-node3 systemd[56904]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 11 11:33:29 managed-node3 systemd[56904]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 11 11:33:29 managed-node3 systemd[56904]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 11 11:33:29 managed-node3 systemd[56904]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 11 11:33:29 managed-node3 systemd[56904]: Startup finished in 106ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 106977 microseconds. Jan 11 11:33:29 managed-node3 systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 1973. Jan 11 11:33:30 managed-node3 systemd[1]: Started session-7.scope - Session 7 of User root. ░░ Subject: A start job for unit session-7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-7.scope has finished successfully. ░░ ░░ The job identifier is 2054. 
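Root sessions 3 and 5 were just torn down together with user@0.service, yet within the same second a new SSH connection (session 7) and a fresh user manager appear. When a play deliberately kills the sessions it is running under, it typically has to force the controller to reconnect; a common way to express that is sketched below, though whether this test uses exactly this mechanism is an assumption. The entry that follows, a renewed ansible.legacy.setup fact run, is consistent with such a reconnect.

# Hypothetical sketch: drop the current persistent SSH connection so the next task
# logs in again and gets a fresh session and user manager on the managed node.
- name: Force Ansible to re-establish its SSH connection
  ansible.builtin.meta: reset_connection

- name: Gather facts again over the new session
  ansible.builtin.setup: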
Jan 11 11:33:30 managed-node3 sshd-session[56896]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:33:31 managed-node3 python3.12[57095]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 11 11:33:32 managed-node3 python3.12[57255]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:32 managed-node3 python3.12[57386]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:34 managed-node3 python3.12[57648]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:35 managed-node3 python3.12[57786]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 11 11:33:36 managed-node3 python3.12[57918]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:38 managed-node3 python3.12[58051]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:39 managed-node3 python3.12[58184]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:39 managed-node3 python3.12[58315]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:33:40 managed-node3 python3.12[58420]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613219.4797897-17447-31959509434284/.source.pod dest=/etc/containers/systemd/quadlet-pod-pod.pod owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:41 managed-node3 python3.12[58551]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:33:41 managed-node3 systemd[1]: Reload requested from client PID 58552 ('systemctl') (unit session-7.scope)... Jan 11 11:33:41 managed-node3 systemd[1]: Reloading... Jan 11 11:33:41 managed-node3 systemd-rc-local-generator[58594]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:33:41 managed-node3 systemd-ssh-generator[58596]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:33:41 managed-node3 (sd-exec-[58569]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:33:41 managed-node3 systemd[1]: Reloading finished in 194 ms. 
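At this point the role has created /etc/containers/systemd, installed quadlet-pod-pod.pod with mode 0644, and requested a systemd daemon reload so that Quadlet regenerates quadlet-pod-pod-pod.service. A condensed sketch of those three steps follows; the inline [Pod] content is an assumption based on the pod name quadlet-pod seen later in the journal, not the actual file shipped by the test.

# Condensed sketch of the deployment steps recorded above (file content is assumed).
- name: Ensure the Quadlet drop-in directory exists
  ansible.builtin.file:
    path: /etc/containers/systemd
    state: directory
    owner: root
    group: "0"
    mode: "0755"

- name: Install the pod Quadlet unit
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-pod-pod.pod
    owner: root
    group: "0"
    mode: "0644"
    content: |
      [Pod]
      PodName=quadlet-pod

- name: Reload systemd so Quadlet generates the pod service
  ansible.builtin.systemd:
    daemon_reload: true

The generated unit name follows Quadlet's convention of appending -pod to the basename of the .pod file, which is why quadlet-pod-pod.pod yields quadlet-pod-pod-pod.service in the journal entries that follow.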
Jan 11 11:33:41 managed-node3 python3.12[58734]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:33:41 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2136. Jan 11 11:33:41 managed-node3 systemd[1]: var-lib-containers-storage-overlay-compat2897760179-lower\x2dmapped.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat2897760179-lower\x2dmapped.mount has successfully entered the 'dead' state. Jan 11 11:33:42 managed-node3 podman[58738]: 2025-01-11 11:33:42.161976365 -0500 EST m=+0.307686542 image build 46a99829a23feee54ca5ee50428836042f880fe5158b9e9e380af827fc20c994 Jan 11 11:33:42 managed-node3 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 2221. Jan 11 11:33:42 managed-node3 systemd[1]: Created slice machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice - cgroup machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice. ░░ Subject: A start job for unit machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice has finished successfully. ░░ ░░ The job identifier is 2220. 
Jan 11 11:33:42 managed-node3 podman[58738]: 2025-01-11 11:33:42.214500844 -0500 EST m=+0.360211079 container create 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:42 managed-node3 podman[58738]: 2025-01-11 11:33:42.223918114 -0500 EST m=+0.369628182 pod create f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 quadlet-pod-pod-pod[58738]: f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:42 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:42 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.2725] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.2795] device (podman0): carrier: link connected Jan 11 11:33:42 managed-node3 (udev-worker)[58803]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.2800] device (veth0): carrier: link connected Jan 11 11:33:42 managed-node3 (udev-worker)[58802]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.2833] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3105] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3110] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3116] device (podman0): Activation: starting connection 'podman0' (cc3b408c-6ff4-4bc3-afaa-024a4ecaf30b) Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3117] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3120] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3121] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3123] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2226. 
Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3459] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3462] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.3467] device (podman0): Activation: successful, device activated. Jan 11 11:33:42 managed-node3 systemd[1]: Started libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope - libcrun container. ░░ Subject: A start job for unit libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope has finished successfully. ░░ ░░ The job identifier is 2305. Jan 11 11:33:42 managed-node3 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2226. Jan 11 11:33:42 managed-node3 podman[58792]: 2025-01-11 11:33:42.384135577 -0500 EST m=+0.145163827 container init 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 podman[58792]: 2025-01-11 11:33:42.387065432 -0500 EST m=+0.148093789 container start 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 systemd[1]: libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d.scope has successfully entered the 'dead' state. Jan 11 11:33:42 managed-node3 podman[58792]: 2025-01-11 11:33:42.396752198 -0500 EST m=+0.157780383 pod start f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 quadlet-pod-pod-pod[58792]: quadlet-pod Jan 11 11:33:42 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2136. 
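The unit reports started, but note the ordering in the journal: the infra container is created and started, the pod start is logged, and the podman process the service tracks then exits. A quick way to see what podman actually left behind at a point like this is to query the pod directly; the tasks below are illustrative only and not part of the test.

# Illustrative check (not part of the test): list the pod and show its infra container.
- name: Show the state of the quadlet-pod pod
  ansible.builtin.command: podman pod ps --filter name=quadlet-pod
  register: __pod_state
  changed_when: false

- name: Show pod details including the infra container ID
  ansible.builtin.command: podman pod inspect quadlet-pod
  register: __pod_inspect
  changed_when: false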
Jan 11 11:33:42 managed-node3 podman[58848]: 2025-01-11 11:33:42.433638423 -0500 EST m=+0.035132717 container died 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:42 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:42 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.4700] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:42 managed-node3 podman[58848]: 2025-01-11 11:33:42.524515891 -0500 EST m=+0.126010022 container cleanup 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 podman[58848]: 2025-01-11 11:33:42.525432713 -0500 EST m=+0.126927037 pod stop f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 systemd[1]: Removed slice machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice - cgroup machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice. ░░ Subject: A stop job for unit machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16.slice has finished. ░░ ░░ The job identifier is 2312 and the job result is done. Jan 11 11:33:42 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:33:42 managed-node3 podman[58892]: 2025-01-11 11:33:42.661148895 -0500 EST m=+0.097324272 container remove 6c8d92c5ec67d49961786cdce57ba7a3e44a38fd50adecf116a31d7701eaaf1d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:42 managed-node3 podman[58892]: 2025-01-11 11:33:42.674472285 -0500 EST m=+0.110647644 pod remove f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 quadlet-pod-pod-pod[58892]: f16523409f39cf797004f00ea00411addc703c19726919c5e9b032493d078a16 Jan 11 11:33:42 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. 
Jan 11 11:33:42 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 1. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:42 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2314. Jan 11 11:33:42 managed-node3 systemd[1]: Created slice machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice - cgroup machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice. ░░ Subject: A start job for unit machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice has finished successfully. ░░ ░░ The job identifier is 2398. Jan 11 11:33:42 managed-node3 podman[58903]: 2025-01-11 11:33:42.923499699 -0500 EST m=+0.077152560 container create 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:42 managed-node3 podman[58903]: 2025-01-11 11:33:42.930487429 -0500 EST m=+0.084140382 pod create bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 (image=, name=quadlet-pod) Jan 11 11:33:42 managed-node3 quadlet-pod-pod-pod[58903]: bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 Jan 11 11:33:42 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:42 managed-node3 (udev-worker)[58814]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:33:42 managed-node3 NetworkManager[708]: [1736613222.9899] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:42 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:42 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:42 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:43 managed-node3 (udev-worker)[58824]: Network interface NamePolicy= disabled on kernel command line. 
Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0004] device (podman0): carrier: link connected Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0008] device (veth0): carrier: link connected Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0011] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0146] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0156] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0169] device (podman0): Activation: starting connection 'podman0' (c078faf4-836c-4380-b46d-53e3e0090860) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0171] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0187] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0192] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0196] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0361] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0365] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.0375] device (podman0): Activation: successful, device activated. Jan 11 11:33:43 managed-node3 systemd[1]: Started libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope - libcrun container. ░░ Subject: A start job for unit libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope has finished successfully. ░░ ░░ The job identifier is 2404. Jan 11 11:33:43 managed-node3 podman[58912]: 2025-01-11 11:33:43.094364165 -0500 EST m=+0.142302923 container init 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[58912]: 2025-01-11 11:33:43.097976845 -0500 EST m=+0.145915491 container start 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 systemd[1]: libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27.scope has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 podman[58912]: 2025-01-11 11:33:43.106739364 -0500 EST m=+0.154678108 pod start bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 quadlet-pod-pod-pod[58912]: quadlet-pod Jan 11 11:33:43 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2314. Jan 11 11:33:43 managed-node3 podman[58988]: 2025-01-11 11:33:43.148845669 -0500 EST m=+0.033797455 container died 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:43 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.1907] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:43 managed-node3 podman[58988]: 2025-01-11 11:33:43.245686434 -0500 EST m=+0.130638237 container cleanup 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[58988]: 2025-01-11 11:33:43.250670322 -0500 EST m=+0.135622172 pod stop bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 systemd[1]: Removed slice machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice - cgroup machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice. ░░ Subject: A stop job for unit machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56.slice has finished. ░░ ░░ The job identifier is 2411 and the job result is done. Jan 11 11:33:43 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. 
Jan 11 11:33:43 managed-node3 python3.12[59072]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:43 managed-node3 podman[59075]: 2025-01-11 11:33:43.460390803 -0500 EST m=+0.065346504 container remove 54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[59075]: 2025-01-11 11:33:43.470818596 -0500 EST m=+0.075774258 pod remove bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 quadlet-pod-pod-pod[59075]: bc423b6d07aada66ba1fcda2ef6493384b509b2b0ac34250b5d478b97f0b7f56 Jan 11 11:33:43 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:43 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 2. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:43 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2413. Jan 11 11:33:43 managed-node3 systemd[1]: Created slice machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice - cgroup machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice. ░░ Subject: A start job for unit machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice has finished successfully. ░░ ░░ The job identifier is 2497. 
Jan 11 11:33:43 managed-node3 podman[59108]: 2025-01-11 11:33:43.665895514 -0500 EST m=+0.067028463 container create 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[59108]: 2025-01-11 11:33:43.670976812 -0500 EST m=+0.072109809 pod create a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 quadlet-pod-pod-pod[59108]: a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:43 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7221] device (podman0): carrier: link connected Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7224] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/7) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7232] device (veth0): carrier: link connected Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7247] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7488] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7497] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7505] device (podman0): Activation: starting connection 'podman0' (65b239aa-8b9a-4693-af2d-f34968395b1b) Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7509] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7514] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7517] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7521] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7541] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7542] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.7546] device (podman0): Activation: successful, device activated. Jan 11 11:33:43 managed-node3 systemd[1]: Started libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope - libcrun container. 
░░ Subject: A start job for unit libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope has finished successfully. ░░ ░░ The job identifier is 2503. Jan 11 11:33:43 managed-node3 podman[59117]: 2025-01-11 11:33:43.802088126 -0500 EST m=+0.116118046 container init 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[59117]: 2025-01-11 11:33:43.804936417 -0500 EST m=+0.118966414 container start 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 systemd[1]: libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1.scope has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 podman[59117]: 2025-01-11 11:33:43.812660115 -0500 EST m=+0.126690086 pod start a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 quadlet-pod-pod-pod[59117]: quadlet-pod Jan 11 11:33:43 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2413. Jan 11 11:33:43 managed-node3 podman[59148]: 2025-01-11 11:33:43.842204827 -0500 EST m=+0.022015288 container died 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:43 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:43 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:43 managed-node3 systemd[1]: run-netns-netns\x2d4ff228e8\x2d3ef0\x2d5a40\x2d0a90\x2d5d04e49b142b.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d4ff228e8\x2d3ef0\x2d5a40\x2d0a90\x2d5d04e49b142b.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay-d3a043c760e53579abc09563f308ecda214f4d8c2fb56392504f327f596d81be-merged.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-d3a043c760e53579abc09563f308ecda214f4d8c2fb56392504f327f596d81be-merged.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-54f36b75e0a8516df8638738a3d71cdf6afc22de0309de4fd6384155f35d4c27-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 NetworkManager[708]: [1736613223.8791] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:43 managed-node3 systemd[1]: run-netns-netns\x2d52167144\x2d26e4\x2d8553\x2dc4ab\x2ddbe341587c2d.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d52167144\x2d26e4\x2d8553\x2dc4ab\x2ddbe341587c2d.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay-4ace597ac1e8b5c7a95aa85a9f9a6c385ad9668276e22ab87a26bb34935edeee-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-4ace597ac1e8b5c7a95aa85a9f9a6c385ad9668276e22ab87a26bb34935edeee-merged.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 podman[59148]: 2025-01-11 11:33:43.928041874 -0500 EST m=+0.107852343 container cleanup 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:43 managed-node3 podman[59148]: 2025-01-11 11:33:43.929103673 -0500 EST m=+0.108914146 pod stop a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 (image=, name=quadlet-pod) Jan 11 11:33:43 managed-node3 systemd[1]: Removed slice machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice - cgroup machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice. ░░ Subject: A stop job for unit machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67.slice has finished. ░░ ░░ The job identifier is 2510 and the job result is done. 
Jan 11 11:33:43 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:43 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:33:44 managed-node3 podman[59171]: 2025-01-11 11:33:44.092865512 -0500 EST m=+0.121909350 container remove 26b1ce47193e3f0dcefa616bdd79b59efdf36a418d5bd4b1de954f61869470a1 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:44 managed-node3 podman[59171]: 2025-01-11 11:33:44.103290226 -0500 EST m=+0.132334060 pod remove a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 quadlet-pod-pod-pod[59171]: a0bd878a963f7e7e6d0f635e9b7e32693d81759388c8d1b35515744638f41b67 Jan 11 11:33:44 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:44 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 3. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:44 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2512. Jan 11 11:33:44 managed-node3 systemd[1]: Created slice machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice - cgroup machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice. ░░ Subject: A start job for unit machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice has finished successfully. ░░ ░░ The job identifier is 2596. 
Jan 11 11:33:44 managed-node3 podman[59181]: 2025-01-11 11:33:44.431125744 -0500 EST m=+0.084935362 container create b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:44 managed-node3 podman[59181]: 2025-01-11 11:33:44.439107554 -0500 EST m=+0.092917156 pod create fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 quadlet-pod-pod-pod[59181]: fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:44 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:44 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.4955] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5018] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5038] device (veth0): carrier: link connected Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5055] device (podman0): carrier: link connected Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5287] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5294] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5303] device (podman0): Activation: starting connection 'podman0' (24ea35ac-2e29-4912-9ce3-1316d64cdeea) Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5305] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5310] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5312] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5316] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5328] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5330] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.5335] device (podman0): Activation: successful, device activated. Jan 11 11:33:44 managed-node3 systemd[1]: Started libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope - libcrun container. 
░░ Subject: A start job for unit libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope has finished successfully. ░░ ░░ The job identifier is 2602. Jan 11 11:33:44 managed-node3 podman[59236]: 2025-01-11 11:33:44.57596445 -0500 EST m=+0.114151936 container init b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:44 managed-node3 podman[59236]: 2025-01-11 11:33:44.57891896 -0500 EST m=+0.117106429 container start b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:44 managed-node3 systemd[1]: libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d.scope has successfully entered the 'dead' state. Jan 11 11:33:44 managed-node3 podman[59236]: 2025-01-11 11:33:44.58689249 -0500 EST m=+0.125079901 pod start fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 quadlet-pod-pod-pod[59236]: quadlet-pod Jan 11 11:33:44 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2512. 
Jan 11 11:33:44 managed-node3 podman[59286]: 2025-01-11 11:33:44.627235873 -0500 EST m=+0.036804689 container died b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:44 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:44 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:44 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:44 managed-node3 NetworkManager[708]: [1736613224.6709] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:44 managed-node3 podman[59286]: 2025-01-11 11:33:44.732822318 -0500 EST m=+0.142391055 pod stop fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 podman[59286]: 2025-01-11 11:33:44.733345838 -0500 EST m=+0.142914621 container cleanup b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:44 managed-node3 systemd[1]: Removed slice machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice - cgroup machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice. ░░ Subject: A stop job for unit machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e.slice has finished. ░░ ░░ The job identifier is 2609 and the job result is done. Jan 11 11:33:44 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:33:44 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 11 11:33:44 managed-node3 podman[59359]: 2025-01-11 11:33:44.964607034 -0500 EST m=+0.076955461 container remove b0825eeeca3a4e72ad1fe32b4af8844b78fd95aa5d155139e02d60e576710f3d (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:44 managed-node3 podman[59359]: 2025-01-11 11:33:44.978722022 -0500 EST m=+0.091069993 pod remove fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e (image=, name=quadlet-pod) Jan 11 11:33:44 managed-node3 quadlet-pod-pod-pod[59359]: fb1323c8fe85db26595b2d25cbff91cb490f290f1493f1ae5af62c740f9e370e Jan 11 11:33:44 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:44 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:45 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 4. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:45 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2611. Jan 11 11:33:45 managed-node3 systemd[1]: Created slice machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice - cgroup machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice. ░░ Subject: A start job for unit machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice has finished successfully. ░░ ░░ The job identifier is 2695. 
Jan 11 11:33:45 managed-node3 podman[59378]: 2025-01-11 11:33:45.413300507 -0500 EST m=+0.066708719 container create cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 podman[59378]: 2025-01-11 11:33:45.418970807 -0500 EST m=+0.072378991 pod create 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e (image=, name=quadlet-pod) Jan 11 11:33:45 managed-node3 quadlet-pod-pod-pod[59378]: 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.4695] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/11) Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:45 managed-node3 kernel: veth0: entered allmulticast mode Jan 11 11:33:45 managed-node3 kernel: veth0: entered promiscuous mode Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.4814] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/12) Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.4822] device (veth0): carrier: link connected Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.4836] device (podman0): carrier: link connected Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5046] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5062] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5069] device (podman0): Activation: starting connection 'podman0' (829234d2-499e-4deb-9a30-f8535ff1629d) Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5070] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5072] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5074] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5076] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5106] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.5108] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:33:45 managed-node3 
NetworkManager[708]: [1736613225.5112] device (podman0): Activation: successful, device activated. Jan 11 11:33:45 managed-node3 systemd[1]: Started libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope - libcrun container. ░░ Subject: A start job for unit libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope has finished successfully. ░░ ░░ The job identifier is 2701. Jan 11 11:33:45 managed-node3 podman[59386]: 2025-01-11 11:33:45.564980188 -0500 EST m=+0.124304861 container init cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 podman[59386]: 2025-01-11 11:33:45.567724137 -0500 EST m=+0.127049102 container start cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 systemd[1]: libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6.scope has successfully entered the 'dead' state. Jan 11 11:33:45 managed-node3 podman[59386]: 2025-01-11 11:33:45.574299577 -0500 EST m=+0.133624167 pod start 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e (image=, name=quadlet-pod) Jan 11 11:33:45 managed-node3 quadlet-pod-pod-pod[59386]: quadlet-pod Jan 11 11:33:45 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2611. 
Jan 11 11:33:45 managed-node3 podman[59418]: 2025-01-11 11:33:45.605089227 -0500 EST m=+0.023503344 container died cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:45 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:33:45 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:33:45 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Jan 11 11:33:45 managed-node3 NetworkManager[708]: [1736613225.6413] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:33:45 managed-node3 podman[59418]: 2025-01-11 11:33:45.691013863 -0500 EST m=+0.109427986 container cleanup cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 11 11:33:45 managed-node3 podman[59418]: 2025-01-11 11:33:45.692958697 -0500 EST m=+0.111372736 pod stop 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e (image=, name=quadlet-pod) Jan 11 11:33:45 managed-node3 systemd[1]: Removed slice machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice - cgroup machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice. ░░ Subject: A stop job for unit machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e.slice has finished. ░░ ░░ The job identifier is 2708 and the job result is done. Jan 11 11:33:45 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-pod-pod.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:33:45 managed-node3 podman[59441]: 2025-01-11 11:33:45.848995946 -0500 EST m=+0.122184958 container remove cdbc3bd97aa43f1997d0b11752a6f8084227ec6bbd95c23b9ed013f7281aa3d6 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jan 11 11:33:45 managed-node3 podman[59441]: 2025-01-11 11:33:45.860106345 -0500 EST m=+0.133295323 pod remove 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e (image=, name=quadlet-pod) Jan 11 11:33:45 managed-node3 quadlet-pod-pod-pod[59441]: 82e8ca66ea57f2a8bc3fcc5e880d61f64635c55c5f22f69b5176443887e7721e Jan 11 11:33:45 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. 
Jan 11 11:33:46 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Scheduled restart job, restart counter is at 5. ░░ Subject: Automatic restarting of a unit has been scheduled ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Automatic restarting of the unit quadlet-pod-pod-pod.service has been scheduled, as the result for ░░ the configured Restart= setting for the unit. Jan 11 11:33:46 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Start request repeated too quickly. Jan 11 11:33:46 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:46 managed-node3 systemd[1]: Failed to start quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished with a failure. ░░ ░░ The job identifier is 2710 and the job result is failed. Jan 11 11:33:47 managed-node3 podman[59366]: 2025-01-11 11:33:47.404358497 -0500 EST m=+2.491863471 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 11 11:33:47 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:47 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
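The "Start request repeated too quickly" message above is systemd start rate limiting: with Restart= configured in the generated unit and the default start limit (typically 5 starts within 10 seconds), the fifth failed start leaves the unit in the failed state without further retries. A minimal sketch of how the relevant counters could be inspected and cleared on the node (assumed debugging commands, not part of the test run):

# Show the restart policy, restart count, and start-limit settings of the unit
systemctl show quadlet-pod-pod-pod.service -p Restart -p NRestarts -p StartLimitBurst -p StartLimitIntervalUSec
# Clear the failed state and the rate-limit counter, then try again
systemctl reset-failed quadlet-pod-pod-pod.service
systemctl start quadlet-pod-pod-pod.service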
Jan 11 11:33:47 managed-node3 python3.12[59607]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:48 managed-node3 python3.12[59738]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:33:48 managed-node3 python3.12[59843]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613228.00213-17713-248510374954123/.source.container dest=/etc/containers/systemd/quadlet-pod-container.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:49 managed-node3 python3.12[59974]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:33:49 managed-node3 systemd[1]: Reload requested from client PID 59975 ('systemctl') (unit session-7.scope)... Jan 11 11:33:49 managed-node3 systemd[1]: Reloading... Jan 11 11:33:49 managed-node3 systemd-rc-local-generator[60018]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:33:49 managed-node3 systemd-ssh-generator[60020]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:33:49 managed-node3 (sd-exec-[59993]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:33:49 managed-node3 systemd[1]: Reloading finished in 205 ms. Jan 11 11:33:49 managed-node3 python3.12[60158]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:33:49 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Start request repeated too quickly. Jan 11 11:33:49 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:33:49 managed-node3 systemd[1]: Failed to start quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished with a failure. ░░ ░░ The job identifier is 2795 and the job result is failed. Jan 11 11:33:49 managed-node3 systemd[1]: Dependency failed for quadlet-pod-container.service. ░░ Subject: A start job for unit quadlet-pod-container.service has failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-container.service has finished with a failure. ░░ ░░ The job identifier is 2794 and the job result is dependency. 
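The copy task above installs /etc/containers/systemd/quadlet-pod-container.container, and its start then fails only because it depends on the already rate-limited quadlet-pod-pod-pod.service. The file contents are not printed in this log; a rough sketch of what the pair of quadlet units could look like, assuming only the names and image visible in the journal (quadlet-pod-pod.pod generating quadlet-pod-pod-pod.service, and the testimage pulled earlier):

# Hypothetical pod unit; quadlet turns <name>.pod into <name>-pod.service
cat > /etc/containers/systemd/quadlet-pod-pod.pod <<'EOF'
[Pod]
PodName=quadlet-pod
EOF

# Hypothetical container unit joined to that pod
cat > /etc/containers/systemd/quadlet-pod-container.container <<'EOF'
[Container]
Image=quay.io/libpod/testimage:20210610
ContainerName=quadlet-pod-container
Pod=quadlet-pod-pod.pod

[Install]
WantedBy=default.target
EOF

# Regenerate units from the quadlet files and start the container service
systemctl daemon-reload
systemctl start quadlet-pod-container.service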
Jan 11 11:33:49 managed-node3 systemd[1]: quadlet-pod-container.service: Job quadlet-pod-container.service/start failed with result 'dependency'. Jan 11 11:33:50 managed-node3 python3.12[60292]: ansible-ansible.legacy.command Invoked with _raw_params=set -x set -o pipefail exec 1>&2 #podman volume rm --all #podman network prune -f podman volume ls podman network ls podman secret ls podman container ls podman pod ls podman images systemctl list-units | grep quadlet systemctl list-unit-files | grep quadlet ls -alrtF /etc/containers/systemd /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:50 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:50 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:51 managed-node3 python3.12[60475]: ansible-ansible.legacy.command Invoked with _raw_params=grep type=AVC /var/log/audit/audit.log _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:51 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
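The failure handler above dumps the podman state and runs the quadlet generator by hand. When debugging interactively, the same information can be narrowed to the failing unit; a minimal sketch mirroring the commands the test invokes (assumed invocations, not additional test output):

# Validate the quadlet files and print the units they would generate
/usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log
# Confirm which quadlet-generated units systemd knows about
systemctl list-unit-files 'quadlet-pod-*'
# Read the generated unit and its recent journal to see why ExecStart exits with status 1
systemctl cat quadlet-pod-pod-pod.service
journalctl -u quadlet-pod-pod-pod.service --no-pager -n 100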
Jan 11 11:33:51 managed-node3 python3.12[60608]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 11 11:33:53 managed-node3 python3.12[60871]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 11 11:33:54 managed-node3 python3.12[61009]: ansible-getent Invoked with database=passwd key=user_quadlet_pod fail_key=False service=None split=None
Jan 11 11:33:54 managed-node3 python3.12[61141]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node3              : ok=66   changed=4    unreachable=0    failed=2    skipped=87   rescued=2    ignored=0

TASKS RECAP ********************************************************************
Saturday 11 January 2025 11:33:54 -0500 (0:00:00.429)       0:00:24.000 ******
===============================================================================
fedora.linux_system_roles.podman : Ensure container images are present --- 3.18s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Start service ------------------------ 1.21s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Gathering Facts --------------------------------------------------------- 1.20s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.10s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Reload systemctl --------------------- 1.02s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Debug3 ------------------------------------------------------------------ 0.90s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:127
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.87s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.85s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Reload systemctl --------------------- 0.74s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.69s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
fedora.linux_system_roles.podman : Start service ------------------------ 0.53s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Dump journal ------------------------------------------------------------ 0.50s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:151
fedora.linux_system_roles.podman : Get podman version ------------------- 0.48s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
fedora.linux_system_roles.podman : See if getsubids exists -------------- 0.47s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
fedora.linux_system_roles.podman : Ensure the quadlet directory is present --- 0.47s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
fedora.linux_system_roles.podman : Get user information ----------------- 0.47s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
fedora.linux_system_roles.podman : Check if system is ostree ------------ 0.45s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Dump journal ------------------------------------------------------------ 0.43s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:194
fedora.linux_system_roles.podman : Get podman version ------------------- 0.39s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Check AVCs -------------------------------------------------------------- 0.39s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:146